File ‹More_Library.ML›
(* Extension of the Isabelle/ML Library with general-purpose combinators and
list operations used throughout the ETTS. *)
signature LIBRARY =
sig
include LIBRARY
(* flip f x y = f y x *)
val flip : ('a -> 'b -> 'c) -> 'b -> 'a -> 'c
(* conversions between homogeneous pairs/triples and two-/three-element
lists; the *_of_list directions raise Fail on lists of any other length *)
val list_of_pair : 'a * 'a -> 'a list
val pair_of_list : 'a list -> 'a * 'a
val list_of_triple : 'a * 'a * 'a -> 'a list
val triple_of_list : 'a list -> 'a * 'a * 'a
(* position-wise application of a function to two lists of equal length;
raises Empty if the lengths differ *)
val compare_each : ('a * 'b -> 'c) -> 'a list -> 'b list -> 'c list
(* number of elements removed when duplicates (w.r.t. the given equality)
are eliminated *)
val numdup : ('a * 'a -> bool) -> 'a list -> int
(* move the head of a non-empty list to its end; raises Empty on [] *)
val rotate_list : 'a list -> 'a list
(* minimum of a non-empty list of integers; raises Empty on [] *)
val min_list : int list -> int
end;
structure Library: LIBRARY =
struct

open Library;

(* Reverse the argument order of a curried two-argument function. *)
fun flip f = fn x => fn y => f y x;

(* Conversions between homogeneous pairs/triples and lists; the inverse
   directions raise Fail on lists of the wrong length. *)
fun list_of_pair (a, b) = [a, b];
fun pair_of_list ys =
  (case ys of
    [a, b] => (a, b)
  | _ => raise Fail "pair_of_list");
fun list_of_triple (a, b, c) = [a, b, c];
fun triple_of_list ys =
  (case ys of
    [a, b, c] => (a, b, c)
  | _ => raise Fail "triple_of_list");

(* Apply f position-wise to two lists; raises Empty if they differ in
   length. *)
fun compare_each _ [] [] = []
  | compare_each f (x :: xs) (y :: ys) = f (x, y) :: compare_each f xs ys
  | compare_each _ _ _ = raise Empty;

(* Number of elements that disappear when duplicates (w.r.t. eq) are
   removed from xs. *)
fun numdup eq xs = length xs - length (distinct eq xs);

(* Move the head of a non-empty list to its end; raises Empty on []. *)
fun rotate_list [] = raise Empty
  | rotate_list (x :: xs) = xs @ [x];

(* Minimum of a non-empty list of integers; raises Empty on []. *)
fun min_list [] = raise Empty
  | min_list (x :: xs) = fold (fn a => fn b => if a < b then a else b) xs x;

end;
open Library;File ‹More_Term.ML›
(* Extension of the structure Term with operations on (type) variables. *)
signature TERM =
sig
include TERM
(* true for constants and schematic term variables *)
val is_cv : term -> bool
(* sort of a type variable (TVar or TFree); raises TYPE otherwise *)
val sort_of_tvar : typ -> sort
(* equality of the sorts of two type variables w.r.t. the sort algebra *)
val sort_eqT : theory -> typ * typ -> bool
end;
structure Term: TERM =
struct
open Term;
(* Check whether a term is a constant or a schematic variable. *)
fun is_cv t = is_Const t orelse is_Var t
(* Sort of a type variable; despite the name, TFree is accepted as well. *)
fun sort_of_tvar (TVar (_, S)) = S
| sort_of_tvar (TFree (_, S)) = S
| sort_of_tvar T =
raise TYPE ("the type is not a type variable", single T, [])
(* Compare the sorts of two type variables modulo the sort algebra of thy
(Sorts.sort_eq identifies sorts equal up to the class relation). *)
fun sort_eqT thy (T, U) =
let val algebra = Sign.classes_of thy
in Sorts.sort_eq algebra (sort_of_tvar T, sort_of_tvar U) end;
end;File ‹More_Logic.ML›
(* Extension of the structure Logic with operations on universally
quantified propositions. *)
signature LOGIC =
sig
include LOGIC
(* strip all leading !!-quantifiers, returning the body together with the
specifications of the stripped variables in outer-to-inner order *)
val forall_elim_all : term -> term * (string * typ) list
(* body of t together with the stripped binder specifications and, for
each stripped binder, its index among the frees of the body *)
val get_forall_ftv_permute : term -> term * ((string * typ) list * int list)
end
structure Logic: LOGIC =
struct
open Logic;
(* Strip all leading !!-quantifiers. Termination relies on Logic.dest_all
raising TERM ("dest_all", _) once no quantifier is left; the accumulator
is built innermost-first and reversed at the end. *)
fun forall_elim_all t =
let
fun forall_elim_all_impl t ftv_specs =
let val (ftv_spec, t) = Logic.dest_all t
in forall_elim_all_impl t (ftv_spec::ftv_specs) end
handle TERM ("dest_all", _) => (t, ftv_specs)
in forall_elim_all_impl t [] ||> rev end;
(* For a proposition !!x1 ... xn. phi: return phi, the binder
specifications, and the position of each binder among the free variables
of phi (in the order produced by Term.add_frees, reversed). *)
fun get_forall_ftv_permute t =
let
val (t', forall_ftv_specs) = forall_elim_all t
val ftv_specs = Term.add_frees t' [] |> rev
(* the frees of t' that also occur among the stripped binders, keeping
the order of ftv_specs: computed by removing, from ftv_specs, those
frees that survive the removal of forall_ftv_specs *)
val call_ftv_specs = ftv_specs
|> subtract op= (ftv_specs |> subtract op= forall_ftv_specs)
val index_of_ftv =
(call_ftv_specs ~~ (0 upto (length call_ftv_specs - 1)))
|> AList.lookup op= #> the
(* partial: the lookup raises Option if a binder is not free in t' *)
val forall_ftv_permute = map index_of_ftv forall_ftv_specs
in (t', (forall_ftv_specs, forall_ftv_permute)) end;
end;File ‹More_Tactical.ML›
(* Extension of the structure Tactical. *)
signature TACTICAL =
sig
include TACTICAL
(* combine a list of tactics with APPEND'; the empty list yields no_tac *)
val FIRST_APPEND' : ('a -> tactic) list -> 'a -> tactic
end;
structure Tactical: TACTICAL =
struct

open Tactical;

(* Combine a list of tactics into one that appends the result sequences of
   all of them in the given order; the empty list yields a tactic that
   always fails (K no_tac). *)
fun FIRST_APPEND' [] = K no_tac
  | FIRST_APPEND' (tac :: tacs) = tac APPEND' FIRST_APPEND' tacs;

end;
open Tactical;File ‹More_Simplifier.ML›
(* Extension of the structure More_Simplifier (from the CTR libraries). *)
structure More_Simplifier =
struct
open More_Simplifier;
(* Simplify with only the given (optional) simp rules; NONE falls back to
full simplification with the simpset of ctxt. NOTE(review):
var_simplify_only is defined in the CTR libraries — presumably it
simplifies using exactly the supplied rules; confirm against CTR. *)
fun rewrite_simp_opt' ctxt simp_spec_opt = case simp_spec_opt of
SOME simp_spec =>
var_simplify_only
ctxt
(Attrib.eval_thms ctxt (single simp_spec))
| NONE => Simplifier.full_simplify ctxt;
end;File ‹More_HOLogic.ML›
(* Extension of the structure HOLogic with operations on existential
statements, type definitions and set types. *)
signature HOLOGIC =
sig
include HOLOGIC
val dest_exists : term -> string * typ * term
val mk_type_definition_pred : typ -> typ -> term
val dest_type_definition : term -> term * term * term
val is_binrelvarT : typ -> bool
val dest_SetT : typ -> typ
val dest_SetTFree: typ -> string * sort
val is_setT : typ -> bool
val is_var_setT : typ -> bool
end;
structure HOLogic: HOLOGIC =
struct
open HOLogic;
(* Destruct an existential quantifier into (bound name, bound type, body);
raises TERM otherwise. *)
fun dest_exists ((Const (\<^const_name>‹HOL.Ex›, _) $ Abs (c, U, t))) = (c, U, t)
| dest_exists t = raise TERM ("dest_exists", single t);
(* The constant "type_definition" at the instance (T => U, U => T, U set). *)
fun mk_type_definition_pred T U = Const
(
\<^const_name>‹type_definition›,
(T --> U) --> (U --> T) --> HOLogic.mk_setT U --> HOLogic.boolT
);
(* Destruct an applied type_definition predicate into (Rep, Abs, set). *)
fun dest_type_definition
(Const (\<^const_name>‹type_definition›, _) $ rept $ abst $ sett) =
(rept, abst, sett)
| dest_type_definition t = raise TERM ("dest_type_definition", single t);
(* Check for a binary-relation type ?'a => ?'b => bool between two distinct
schematic type variables. *)
fun is_binrelvarT
(
Type
(
\<^type_name>‹fun›,
[
TVar sT,
Type (\<^type_name>‹fun›, [TVar sU, Type (\<^type_name>‹HOL.bool›, [])])
]
)
) = not (sT = sU)
| is_binrelvarT _ = false;
(* Check for a set type 'a set (any argument type). *)
fun is_setT (Type (\<^type_name>‹Set.set›, _)) = true
| is_setT _ = false
(* Check for a set type whose element type is a (schematic or free) type
variable. *)
fun is_var_setT (Type (\<^type_name>‹Set.set›, [TVar _])) = true
| is_var_setT (Type (\<^type_name>‹Set.set›, [TFree _])) = true
| is_var_setT _ = false
(* Element type of a set type; raises TYPE otherwise. *)
fun dest_SetT (Type (\<^type_name>‹Set.set›, [T])) = T
| dest_SetT T = raise TYPE("dest_SetT", single T, []);
(* Specification (name, sort) of the free element type of a set type;
raises TYPE if the argument is not of the form ('a::S) set. *)
fun dest_SetTFree (Type (\<^type_name>‹Set.set›, [T])) = dest_TFree T
| dest_SetTFree T = raise TYPE("dest_SetTFree", single T, []);
end;File ‹More_Transfer.ML›
(* Extension of the structure Transfer with constructors for transfer side
conditions and conversion of transfer theorems to relation form. *)
signature TRANSFER =
sig
include TRANSFER
(* apply a named predicate constant of type "typeof t => bool" to t *)
val mk_rel_sc : string -> term -> term
val mk_bi_unique : term -> term
val mk_right_total : term -> term
(* convert transfer theorems to transfer-relation theorems *)
val mk_transfer_rels : thm list -> thm list
end
structure Transfer: TRANSFER =
struct
open Transfer;
(* Apply the constant c (of predicate type over the type of t) to t. *)
fun mk_rel_sc c t = Const (c, type_of t --> HOLogic.boolT) $ t;
fun mk_bi_unique t = mk_rel_sc \<^const_name>‹Transfer.bi_unique› t;
fun mk_right_total t = mk_rel_sc \<^const_name>‹Transfer.right_total› t;
(* For each input theorem: first-order match the single premise of
tr_to_tr_rel against its proposition, instantiate tr_to_tr_rel
accordingly and discharge the premise with the theorem via OF.
Note the deliberate shadowing of tr_thms inside the let-binding. *)
fun mk_transfer_rels tr_thms =
let
val tr_to_tr_rel_thm = @{thm tr_to_tr_rel};
val ct = Thm.cprems_of tr_to_tr_rel_thm |> the_single
val tr_thms = tr_thms
|>
(
(
fn tr_thm =>
Thm.first_order_match (ct, (tr_thm |> Thm.cprop_of))
)
|> map
)
|> map (fn inst => Drule.instantiate_normalize inst tr_to_tr_rel_thm)
(* pair each instantiated rule with the singleton list of the matching
input theorem and discharge the premise *)
|> curry (swap #> op~~) (map single tr_thms)
|> map op OF
in tr_thms end;
end
File ‹ETTS_Writer.ML›
(* Hierarchical section counters for user-facing progress output. The
counter list is stored least-significant-first (head = innermost level). *)
signature ETTS_WRITER =
sig
(* fresh counter list of the given depth, all positions set to 1 *)
val initialize : int -> int list
(* increment position i (counted from the left of the printed form) and
reset all deeper counters to 1 *)
val increment_index : int -> int list -> int list
(* print "i1.i2...in. msg" and bump the innermost counter *)
val write_action : string -> int list -> int list
end;
structure ETTS_Writer : ETTS_WRITER =
struct
fun initialize length = replicate length 1
(* Render most-significant-first, dot-separated (hence the rev). *)
fun index_to_string ns = ns
|> rev
|> map Int.toString
|> String.concatWith ".";
fun increment_index i ns =
let
(* convert the printed (left-to-right) position to a list index *)
val i = length ns - i - 1
val ns = nth_map i (fn n => n + 1) ns
(* reset all counters deeper (less significant) than position i to 1 *)
val (ns_lhs, ns_rhs) = chop i ns
val ns_lhs = map (K 1) ns_lhs
in ns_lhs @ ns_rhs end;
(* Emit the message prefixed by the current index, then advance the
innermost counter; raises Empty on an empty counter list. *)
fun write_action c ns =
let
val c = index_to_string ns ^ ". " ^ c
val ns = (hd ns + 1) :: tl ns
val _ = writeln c
in ns end;
end;Theory ETTS
section‹Isar commands and default settings for the ETTS›
theory ETTS
imports
"ETTS_Tools/ETTS_Tools"
"Conditional_Transfer_Rule.CTR"
"HOL-Types_To_Sets.Types_To_Sets"
"HOL-Eisbach.Eisbach"
keywords "tts_register_sbts" :: thy_goal_stmt
and "tts_find_sbts" :: diag
and "tts_theorem" "tts_lemma" "tts_corollary" "tts_proposition" ::
thy_goal_stmt
and "tts_lemmas" :: thy_defn
and "tts_context" :: thy_decl_block
and "tts"
and "to"
and "sbterms"
and "substituting"
and "given"
and "applying"
and "rewriting"
and "eliminating"
and "through"
begin
subsection‹Prerequisites›
subsubsection‹Transfer for local typedef›
text‹
The following content was ported from the content of the session
‹Types_To_Sets› in the main library of Isabelle/HOL with minor amendments.
›
(* Transfer rules for a local type definition: T relates representations in
'a to abstractions in 'b via Rep. *)
context
fixes Rep Abs A T
assumes type: "type_definition Rep Abs A"
assumes T_def: "T ≡ (λ(x::'a) (y::'b). x = Rep y)"
begin
lemma type_definition_Domainp':
"is_equality a ⟹ Transfer.Rel a (Domainp T) (λx. x ∈ A)"
proof -
interpret type_definition Rep Abs A by (rule type)
show "is_equality a ⟹ Transfer.Rel a (Domainp T) (λx. x ∈ A)"
unfolding is_equality_def Transfer.Rel_def
by (elim ssubst, unfold Domainp_iff[abs_def] T_def fun_eq_iff)
(metis Abs_inverse Rep)
qed
lemma type_definition_Domainp: "Domainp T = (λx. x ∈ A)"
proof -
interpret type_definition Rep Abs A by (rule type)
show ?thesis
unfolding Domainp_iff[abs_def] T_def fun_eq_iff by (metis Abs_inverse Rep)
qed
lemma type_definition_Rangep: "Rangep T = (λx. True)"
proof -
interpret type_definition Rep Abs A by (rule type)
show ?thesis unfolding T_def by auto
qed
lemma
shows rep_in_S[simp]: "Rep x ∈ A"
and rep_inverse[simp]: "Abs (Rep x) = x"
and Abs_inverse[simp]: "y ∈ A ⟹ Rep (Abs y) = y"
using type unfolding type_definition_def by auto
end
lemmas [transfer_rule] =
right_total_All_transfer
right_total_UNIV_transfer
right_total_Ex_transfer
subsubsection‹Auxiliary›
(* Every right-total bi-unique relation induces a type definition on the
collection of elements in its domain. *)
lemma ex_type_definition:
fixes A :: "['a, 'b] ⇒ bool"
assumes "right_total A" and "bi_unique A"
shows
"∃(Rep::'b ⇒ 'a) (Abs::'a ⇒ 'b).
type_definition Rep Abs (Collect (Domainp A)) ∧
(∀b b'. A b b' = (b = Rep b'))"
proof(unfold type_definition_def, intro exI conjI; intro allI)
define Rep :: "'b ⇒ 'a" where Rep: "Rep = (λb'. (SOME b. A b b'))"
define Abs :: "'a ⇒ 'b" where Abs: "Abs = (λb. (SOME b'. A b b'))"
have Rep_b: "A (Rep b') b'" for b'
unfolding Rep by (metis assms(1) right_totalE verit_sko_ex')
have Abs_a: "b ∈ Collect (Domainp A) ⟹ A b (Abs b)" for b
unfolding Abs by (simp add: assms(1) Domainp_iff someI_ex)
show "Rep x ∈ Collect (Domainp A)" for x by (auto intro: Rep_b)
show "Abs (Rep x) = x" for x
using assms(2) by (auto dest: bi_uniqueDr intro: Abs_a Rep_b)
show "y ∈ Collect (Domainp A) ⟶ Rep (Abs y) = y" for y
using assms(2) by (auto dest: bi_uniqueDl intro: Abs_a Rep_b)
show "A b b' = (b = Rep b')" for b b'
using assms(2) by (meson Rep_b bi_uniqueDl)
qed
(* Trivial existence lemma used as a seed goal elsewhere in the ETTS. *)
lemma ex_eq: "∃x. x = t" by simp
subsection‹Import›
ML_file‹ETTS_Tactics.ML›
ML_file‹ETTS_Utilities.ML›
ML_file‹ETTS_RI.ML›
ML_file‹ETTS_Substitution.ML›
ML_file‹ETTS_Context.ML›
ML_file‹ETTS_Algorithm.ML›
ML_file‹ETTS_Active.ML›
ML_file‹ETTS_Lemma.ML›
ML_file‹ETTS_Lemmas.ML›
subsection‹Commands and attributes›
(* Register the goal-stating commands; all four are synonyms implemented by
ETTS_Lemma.tts_lemma and differ only in keyword and description. *)
ML ‹
ETTS_Lemma.tts_lemma \<^command_keyword>‹tts_theorem› "tts theorem";
ETTS_Lemma.tts_lemma \<^command_keyword>‹tts_lemma› "tts lemma";
ETTS_Lemma.tts_lemma \<^command_keyword>‹tts_corollary› "tts corollary";
ETTS_Lemma.tts_lemma \<^command_keyword>‹tts_proposition› "tts proposition";
›
subsection‹Default settings›
subsubsection‹\<^text>‹tts_implicit››
named_theorems tts_implicit
subsubsection‹\<^text>‹tts_transfer_rule››
(* Additional transfer rules available by default during relativization. *)
lemmas [transfer_rule] =
right_total_UNIV_transfer
right_total_Collect_transfer
right_total_Inter_transfer
right_total_Compl_transfer
finite_transfer
image_transfer
endFile ‹ETTS_Tactics.ML›
(* Tactics for the elimination of premises during the ETTS relativization. *)
signature ETTS_TACTICS =
sig
(* case-split on a condition, solve the negated branch with the given
tactic and discharge the remaining goal against thm *)
val cond_red_tac :
Proof.context ->
term ->
(Proof.context -> tactic) ->
thm ->
int ->
tactic
(* discharge goal i by eliminating it against assm_thm *)
val id_tac : thm -> int -> tactic
(* repeatedly eliminate leading premises of a theorem that match the given
patterns, proving them away with the supplied tactic *)
val prem_red :
Proof.context -> (term list * (Proof.context -> tactic)) -> thm -> thm
end;
structure ETTS_Tactics : ETTS_TACTICS =
struct
(* Discharge goal i via implies_elim against assm_thm, applied as a
PRIMITIVE inference restricted to the selected goal. *)
fun id_tac assm_thm i =
let fun id_tac_impl assm_thm thm = Thm.implies_elim thm assm_thm;
in SELECT_GOAL (PRIMITIVE (id_tac_impl assm_thm)) i end;
(* Case-split goal i on condt (via Induct.cases_tac), remove double
negations, solve one resulting subgoal completely with cond_tac (with
the local premises inserted), then discharge the remainder against thm. *)
fun cond_red_tac ctxt condt cond_tac thm i =
Induct.cases_tac ctxt false ((SOME condt) |> single |> single) NONE [] i
THEN Local_Defs.unfold_tac ctxt (single @{thm not_not})
THEN SOLVED'
(
SUBPROOF
(
fn {context, prems, ...} =>
Method.insert_tac context prems 1
THEN (cond_tac context)
)
ctxt
)
i
THEN id_tac thm i;
(* Premise reduction: for each premise of thm (rotating through them at
most condn = number-of-premises times), if the premise matches one of
the patterns in #1 tac_spec (or the pattern list is empty), try to
re-prove the theorem without that premise, using cond_red_tac with the
negated premise as the split condition and #2 tac_spec as the solver.
Failures (ERROR/THM) fall back to the merely rotated theorem. *)
fun prem_red ctxt tac_spec thm =
let
(* rotation is a no-op (via handler) for theorems without premises *)
fun rotate_prems_once thm = Drule.rotate_prems 1 thm
handle THM _ => thm
val aterms = #1 tac_spec
fun prem_red_rec thm condn =
let
val prems = Thm.prems_of thm
(* negation of the first premise, if it matches the patterns;
NONE if it does not match or there are no premises *)
val condt_opt =
let
fun pass_through_spec t =
if null aterms orelse member Term.could_unify aterms t
then t
else raise TERM ("", [])
in
prems
|> hd
|> HOLogic.dest_Trueprop
|> pass_through_spec
|> HOLogic.mk_not
|> SOME
handle
TERM _ => NONE
| Empty => NONE
end;
val thm' = rotate_prems_once thm
val thm'' = case condt_opt of
SOME condt =>
let val goalt = Logic.list_implies (tl prems, (Thm.concl_of thm))
in
Goal.prove
ctxt
[]
[]
goalt
(cond_red_tac ctxt condt (#2 tac_spec) thm' 1 |> K)
handle
ERROR _ => thm'
| THM _ => thm'
end
| NONE => thm'
(* the premise was eliminated iff the proposition changed *)
val success_flag =
not (Thm.full_prop_of thm'' = Thm.full_prop_of thm')
in
if success_flag
then prem_red_rec thm'' (condn - 1)
else if condn > 1 then prem_red_rec thm' (condn - 1) else thm
end
val thm = Local_Defs.unfold ctxt (single @{thm not_not}) thm
val condn = thm |> Thm.prems_of |> length
val out_thm = rotate_prems_once (prem_red_rec thm condn)
in out_thm end;
end;File ‹ETTS_Utilities.ML›
(* Miscellaneous utilities for the ETTS. *)
signature ETTS_UTILITIES =
sig
(* invent a term variable name from a type variable name *)
val term_name_of_type_name : string -> string
(* render a list of attribute sources as an ML-style list of strings *)
val string_of_token_src_list : Token.src list -> string
end;
structure ETTS_Utilities : ETTS_UTILITIES =
struct
(* Derive a term name from a type variable name by dropping the leading
character (assumed to be the quote of "'a") and upper-casing the rest;
falls back to "A" if the remainder is not purely alphabetic.
NOTE(review): substring raises Subscript for the empty string — callers
presumably never pass ""; confirm before relying on this. *)
fun term_name_of_type_name c =
let val s = substring (c, 1, size c - 1)
in
if s |> String.explode |> map Char.isAlpha |> List.all I
then String.map Char.toUpper s
else "A"
end;
(* Pretty-print token sources as "[name args, ...]" via ML_Syntax. *)
fun string_of_token_src_list ts =
let
val lhs_cs = map (Token.name_of_src #> fst) ts
val rhs_cs = ts
|> map (Token.args_of_src #> map Token.print #> String.concatWith " ")
val cs =
let
fun condc (lhs_c, rhs_c) =
if rhs_c = "" then lhs_c else lhs_c ^ " " ^ rhs_c
in map condc (lhs_cs ~~ rhs_cs) end
in ML_Syntax.print_list I cs end;
end;
open ETTS_Utilities;File ‹ETTS_RI.ML›
(* Operations on the relativization isar (RI) specification: "risset" are
the set terms (of type 'a set) over which theorems are relativized. *)
signature ETTS_RI =
sig
(* check that a term is a set over a free type variable without schematic
(type) variables *)
val is_risset : term -> bool
val dest_rissetT : typ -> string * sort
val dest_rissetFree : term -> string * (string * sort)
val ftv_spec_of_rissetT_spec : string -> string * string list
val type_of_rissetT_spec : string -> typ
val fv_spec_of_rissetFree : string * string -> string * typ
(* Domainp brel = (λx. x ∈ risset) *)
val mk_Domainp_sc : term -> term -> term
(* validate user-supplied risset, reporting errors under the given
command name *)
val risset_input : Proof.context -> string -> term list -> unit
end;
structure ETTS_RI : ETTS_RI =
struct
(* Check whether a term is a relativization set: its type must be of the
form 'a set for a free type variable 'a, and the term itself must contain
neither schematic type variables nor schematic term variables.

Fix: previously the two "andalso" conjuncts parsed as part of the final
case branch ("_ => false andalso ..."), so the checks for schematic
(type) variables were dead code and, e.g., a Var of type 'a set was
accepted. The case expression is now parenthesized so that the checks
apply to every input. *)
fun is_risset t =
(case type_of t of
(Type (\<^type_name>‹Set.set›, [T])) => is_TFree T
| _ => false)
andalso Term.add_tvars t [] |> null
andalso Term.add_vars t [] |> null;
(* Destructors for risset types/terms: ('a::S) set ~> ("'a", S). *)
val dest_rissetT = HOLogic.dest_SetTFree;
fun dest_rissetFree (Free (c, T)) = (c, dest_rissetT T)
| dest_rissetFree t = raise TERM("dest_rissetFree", single t);
(* A risset type-variable name is paired with the default HOL sort. *)
fun ftv_spec_of_rissetT_spec (c : string) = (c, \<^sort>‹HOL.type›)
fun type_of_rissetT_spec c =
Type (\<^type_name>‹Set.set›, TFree (ftv_spec_of_rissetT_spec c) |> single);
fun fv_spec_of_rissetFree (tc : string, Tc : string) =
(tc, type_of_rissetT_spec Tc);
(* Build the side condition "Domainp brelt = (λx. x ∈ rissett)", where the
element type is taken from the (free-variable) risset type. *)
fun mk_Domainp_sc brelt rissett =
let
val T = rissett |> type_of |> dest_rissetT |> TFree
val lhst =
Const
(
\<^const_name>‹Relation.Domainp›,
(type_of brelt) --> T --> HOLogic.boolT
) $
brelt
val rhst =
let val U = T --> HOLogic.mk_setT T --> HOLogic.boolT
in Abs ("x", T, Const (\<^const_name>‹Set.member›, U) $ Bound 0 $ rissett) end
in HOLogic.mk_eq (lhst, rhst) end;
local
(* all type variables (schematic and free) of a term *)
fun get_tvs t = t
|> (fn t => (Term.add_tvars t [], Term.add_tfrees t []))
|>> map TVar
||> map TFree
|> op@;
(* name-only equality on type variables, ignoring sorts *)
fun ntv_eq ((TVar (xi, _)), (TVar (xi', _))) = Term.eq_ix (xi, xi')
| ntv_eq (TFree (c, _), TFree (c', _)) = c = c'
| ntv_eq (_, _) = false;
(* true iff some group of equally-named type variables carries at least
two sorts that differ w.r.t. the sort algebra of thy *)
fun ex_eq_sorts_neq_ntvs thy =
partition_eq ntv_eq
#> map (distinct (Term.sort_eqT thy))
#> exists (fn xs => 1 < length xs);
(* all term variables (schematic and free) of a term *)
fun get_vs t = t
|> (fn t => (Term.add_vars t [], Term.add_frees t []))
|>> map Var
||> map Free
|> op@;
(* name-only equality on term variables, ignoring types *)
fun tv_eq ((Var (xi, _)), (Var (xi', _))) = Term.eq_ix (xi, xi')
| tv_eq (Free (c, _), Free (c', _)) = c = c'
| tv_eq (_, _) = false;
(* equality of the element-type variables of two set-typed terms *)
fun type_eqT (t, u) =
let
val get_varT = type_of #> HOLogic.dest_SetT
val T = get_varT t
val U = get_varT u
in ntv_eq (T, U) end;
(* true iff some group of equally-named set variables carries at least two
distinct element types *)
val ex_eq_types_neq_nvs = partition_eq tv_eq
#> map (distinct type_eqT)
#> exists (fn xs => 1 < length xs);
in
(* Validate a user-supplied risset for command c: each element must be a
set over a (type) variable, equally-named type variables must agree in
sort, and equally-named variables must agree in type. *)
fun risset_input ctxt c risset =
let
fun mk_msg_prefix msg = c ^ ": " ^ msg
val msg_riss_not_set = mk_msg_prefix
"risset must be terms of the type of the form ?'a set or 'a set"
val msg_riss_not_ds_dtv = mk_msg_prefix
"risset: type variables with distinct sorts must be distinct"
val msg_riss_not_dt_dv = mk_msg_prefix
"risset: variables with distinct types must be distinct"
val _ = risset |> map (type_of #> HOLogic.is_var_setT) |> List.all I
orelse error msg_riss_not_set
val _ = risset
|> map get_tvs
|> flat
|> ex_eq_sorts_neq_ntvs (Proof_Context.theory_of ctxt)
|> not
orelse error msg_riss_not_ds_dtv
val _ = risset
|> map get_vs
|> flat
|> ex_eq_types_neq_nvs
|> not
orelse error msg_riss_not_dt_dv
in () end;
end;
end;File ‹ETTS_Substitution.ML›
(* Registration and lookup of set-based terms (sbterms) together with their
associated theorems, and the command tts_find_sbts. *)
signature ETTS_SUBSTITUTION =
sig
(* theorem registered for an sbterm, if any *)
val sbt_data_of : Proof.context -> Ctermtab.key -> thm option
(* whether a cterm is a registered sbterm *)
val is_sbt_data_key : Proof.context -> cterm -> bool
(* implementation of the command tts_register_sbts *)
val process_tts_register_sbts :
string * string list -> Proof.context -> Proof.state
end;
structure ETTS_Substitution : ETTS_SUBSTITUTION =
struct
open ETTS_Utilities;
open ETTS_RI;
(* Generic/theory/proof data: a table from sbterms (cterms) to theorems.
On merge, entries of the first table win (Ctermtab.merge (K true)). *)
structure SBTData_Args =
struct
type T = thm Ctermtab.table
val empty = Ctermtab.empty
val extend = I
val merge : (T * T -> T) = Ctermtab.merge (K true)
fun init _ = Ctermtab.empty
end;
structure Global_SBTData = Theory_Data (SBTData_Args);
structure Local_SBTData = Proof_Data (SBTData_Args);
val sbt_data_of = Local_SBTData.get #> Ctermtab.lookup;
val sbt_data_keys = Local_SBTData.get #> Ctermtab.keys
(* Apply f to whichever table (local or global) the generic context holds. *)
fun map_sbt_data f (Context.Proof ctxt) = ctxt
|> Local_SBTData.map f
|> Context.Proof
| map_sbt_data f (Context.Theory thy) = thy
|> Global_SBTData.map f
|> Context.Theory;
(* Persistently associate theorem v with sbterm k, transporting both along
the morphism of the local-theory declaration. *)
fun update_sbt_data k v =
let
fun declaration phi = (Morphism.cterm phi k, Morphism.thm phi v)
|> Ctermtab.update
|> map_sbt_data
in Local_Theory.declaration {pervasive=true, syntax=false} declaration end;
fun is_sbt_data_key ctxt ct = member (op aconvc) (sbt_data_keys ctxt) ct;
(* Implementation of tts_find_sbts: print "sbterm : theorem" for the given
terms, or for all registered sbterms if none are given. Partial: `the`
fails for terms that are not registered. *)
fun process_tts_find_sbts args st =
let
val ctxt = Toplevel.context_of st
val args = case args of
[] => sbt_data_keys ctxt
| args => map (ctxt |> Syntax.read_term #> Thm.cterm_of ctxt) args
in
args
|> map (sbt_data_of ctxt #> the #> Thm.string_of_thm ctxt |> apdupr)
|> map (Thm.term_of #> Syntax.string_of_term ctxt |> apfst)
|> map ((fn (c, thmc) => c ^ " : " ^ thmc) #> writeln)
|> K ()
end;
val parse_tts_find_sbts = Parse.and_list Parse.term;
val _ = Outer_Syntax.command
\<^command_keyword>‹tts_find_sbts›
"lookup a theorem associated with a constant or a fixed variable"
(parse_tts_find_sbts >> (process_tts_find_sbts #> Toplevel.keep));
local
fun mk_msg_tts_register_sbts msg = "tts_register_sbts: " ^ msg;
(*
Construct the registration goal for an sbterm: an existential statement
providing a transfer counterpart for the sbterm, under
Domainp/bi_unique/right_total premises for the transfer relations
associated with the risset.
NOTE(review): the pair-shuffling combinators (apdupr, apdupl, dup,
map_slice_side_r, reroute_ps_sp, reroute_sp_ps, reroute_ps_triple) are
defined in ETTS_Tools; the comments below describe the apparent intent of
each stage and should be confirmed against those definitions.
*)
fun mk_goal_register_sbts ctxt sbt risset =
let
val msg_repeated_risset = mk_msg_tts_register_sbts
"the type variables associated with the risset must be distinct"
(* premises for one (relation, risset) pair *)
fun mk_rel_assms (brelt, rissett) =
[
mk_Domainp_sc brelt rissett,
Transfer.mk_bi_unique brelt,
Transfer.mk_right_total brelt
];
val rissetftv_specs = map (type_of #> dest_rissetT) risset
val _ = rissetftv_specs
|> has_duplicates op=
|> not orelse error msg_repeated_risset
(* pair the sbterm with the free type variables of its type *)
val sbt = sbt |> (type_of #> (fn t => Term.add_tfreesT t []) |> apdupr)
(* split the sbterm's type variables into those shared with the risset
and the rest; invent fresh types for both groups in ctxt *)
val (sbtftv_specs, ctxt) =
let
fun mk_ri_rhs_Ts ctxt f = map (apdupr f)
#> map_slice_side_r (fn Ss => Variable.invent_types Ss ctxt)
in
sbt
|> #2
|> distinct op=
|> dup
|>> inter op= rissetftv_specs
||> subtract op= rissetftv_specs
|>> mk_ri_rhs_Ts ctxt (K \<^sort>‹HOL.type›)
|>> swap
|> reroute_ps_sp
|> swap
|>> apsnd (map dup)
end
val sbt = apsnd (filter (member op= (sbtftv_specs |> #1 |> map #1))) sbt
(* for the risset-related type variables: invent fresh relation
variables "'a'b" and build transfer relations; for the remaining
type variables use the equality relation *)
val (sbtftv_specs, ctxt') =
let val un_of_typ = #1 #> term_name_of_type_name
in
sbtftv_specs
|>> map (apfst un_of_typ #> apsnd un_of_typ |> apdupr)
|>> map (apsnd op^)
|>> map_slice_side_r (fn cs => Variable.variant_fixes cs ctxt)
|>> (apfst TFree #> apsnd TFree |> apdupr |> apfst |> map |> apfst)
|>> (reroute_ps_sp |> map |> apfst)
|>> (swap #> HOLogic.mk_rel |> apsnd |> map |> apfst)
|>> swap
|> reroute_ps_sp
|> swap
|>> (#1 #> TFree #> HOLogic.eq_const |> apdupr |> map |> apsnd)
end
(* assemble the existential goal together with its premises *)
val sbt_specs =
let
(* transfer relation for a risset type-variable name *)
val ftv_map = sbtftv_specs
|> #1
|> map (apfst #1)
|> AList.lookup op= #> the
val ftv_map' = sbtftv_specs
|> op@
|> map (apfst #1)
(* risset term for a risset type-variable specification *)
val risset_of_ftv_spec = ((risset |> map (type_of #> dest_rissetT)) ~~ risset)
|> AList.lookup op=
(* fresh type for an original type-variable specification *)
val map_specTs_to_rcdTs = sbtftv_specs
|> op@
|> map (#1 #> apsnd TFree)
|> AList.lookup op= #> the
val (rct_name, ctxt'') = ctxt'
|> Variable.variant_fixes (single "rcdt")
|>> the_single
in
sbt
|>
(
(* premises: Domainp/bi_unique/right_total for each relation *)
(
ftv_map |> apdupl
#> (risset_of_ftv_spec #> the |> apsnd)
#> mk_rel_assms
|> map
#> flat
#> map HOLogic.mk_Trueprop
|> apsnd
)
(* parametricity relator for the sbterm's type *)
#> (#1 #> type_of |> apdupl)
#> (ftv_map' |> CTR_Relators.pr_of_typ ctxt'' |> apfst)
)
|> (fn x => (x, rct_name))
|>
(
(* fresh free variable "rcdt" at the transferred type *)
(#1 #> #2 #> #1 #> type_of |> apdupr)
#> (map_specTs_to_rcdTs |> map_type_tfree |> apsnd)
#> reroute_ps_sp
#> (Free |> apdupl |> apsnd)
)
|> reroute_sp_ps
|>
(
apfst reroute_sp_ps
#> reroute_ps_sp
#> apsnd swap
|> apfst
#> apfst reroute_sp_ps
#> reroute_ps_sp
#> apsnd swap
#> reroute_sp_ps
)
|>
(
(* conclusion: ∃rcdt. relator $ sbt $ rcdt, type-checked *)
apfst op$
#> op$
|> apfst
#> swap
#> reroute_ps_triple
#> HOLogic.mk_exists
#> HOLogic.mk_Trueprop
#> Syntax.check_term ctxt''
|> apfst
)
|> swap
end
(* chain the premises in front of the conclusion *)
val goal =
let
fun add_premts (premts, conclt) = fold_rev
(fn premt => fn t => Logic.mk_implies (premt, t))
premts
conclt
in add_premts sbt_specs end
in (goal, ctxt') end
in
(* Implementation of the command tts_register_sbts: validate the input,
construct the registration goal and open a proof; on completion the
resulting theorem is stored in the sbterm table under the sbterm. *)
fun process_tts_register_sbts args ctxt =
let
val msg_fv_not_fixed = mk_msg_tts_register_sbts
"all fixed variables that occur in the sbterm " ^
"must be fixed in the context"
val msg_ftv_not_fixed = mk_msg_tts_register_sbts
"all fixed type variables that occur in the sbterm " ^
"must be fixed in the context"
val msg_sv = mk_msg_tts_register_sbts
"the sbterm must contain no schematic variables"
val msg_stv = mk_msg_tts_register_sbts
"the sbterm must contain no schematic type variables"
val sbt = args
|> #1
|> Syntax.read_term ctxt
val risset = args
|> #2
|> map (Syntax.read_term ctxt)
val _ = ETTS_RI.risset_input ctxt "tts_register_sbts" risset
(* every free variable of the sbterm must be fixed in ctxt *)
val _ = sbt
|> (fn t => Term.add_frees t [])
|> distinct op=
|> map #1
|> map (Variable.is_fixed ctxt)
|> List.all I
orelse error msg_fv_not_fixed
(* every free type variable of the sbterm must be declared in ctxt *)
val _ = sbt
|> (fn t => Term.add_tfrees t [])
|> distinct op=
|> map #1
|> map (Variable.is_declared ctxt)
|> List.all I
orelse error msg_ftv_not_fixed
(* no schematic (type) variables are allowed in the sbterm *)
val _ = sbt
|> (fn t => Term.add_vars t [])
|> length
|> curry op= 0
orelse error msg_sv
val _ = sbt
|> (fn t => Term.add_tvars t [])
|> length
|> curry op= 0
orelse error msg_stv
val (goalt, _) = mk_goal_register_sbts ctxt sbt risset
val goal_specs = (goalt, []) |> single |> single
val ct = Thm.cterm_of ctxt sbt
(* store the (single) resulting theorem for the sbterm *)
fun after_qed thmss lthy = update_sbt_data ct (thmss |> hd |> hd) lthy
in Proof.theorem NONE after_qed goal_specs ctxt end;
end;
(* Syntax: tts_register_sbts term | term and ... and term *)
val parse_tts_register_sbts =
Parse.term -- (\<^keyword>‹|› |-- Parse.and_list Parse.term);
val _ = Outer_Syntax.local_theory_to_proof
\<^command_keyword>‹tts_register_sbts›
"command for the registration of the set-based terms"
(parse_tts_register_sbts >> process_tts_register_sbts)
end;File ‹ETTS_Context.ML›
(* The tts context: storage, pretty-printing and validation of the data
declared by the command tts_context. *)
signature ETTS_CONTEXT =
sig
type ctxt_def_type
type amend_ctxt_data_input_type
(* rule attributes admissible inside "applying" *)
val rule_attrb : string list
val update_tts_ctxt_data : ctxt_def_type -> Proof.context -> Proof.context
val get_tts_ctxt_data : Proof.context -> ctxt_def_type
val string_of_sbrr_opt : (Facts.ref * Token.src list) option -> string
val string_of_subst_thms : (Facts.ref * Token.src list) list -> string
val string_of_mpespc_opt :
Proof.context ->
(term list * (Proof.context -> tactic)) option ->
string
val string_of_amend_context_data_args :
Proof.context -> amend_ctxt_data_input_type -> string
val string_of_tts_ctxt_data : Proof.context -> ctxt_def_type -> string
(* validate the raw arguments and install the tts context data *)
val amend_context_data :
amend_ctxt_data_input_type -> Proof.context -> ctxt_def_type * Proof.context
val process_tts_context :
amend_ctxt_data_input_type -> Toplevel.transition -> Toplevel.transition
end;
structure ETTS_Context : ETTS_CONTEXT =
struct
(* Rule attributes that may appear in the "applying" section. *)
val rule_attrb =
["OF", "of", "THEN", "where", "simplified", "folded", "unfolded"];
val is_rule_attrb = member op= rule_attrb;
(* Processed tts context data:
rispec - relativization isar specification (stv ~> risset term)
sbtspec - sbterm substitution specification (tbt ~> sbt)
sbrr_opt - optional rewrite rule for set-based reasoning
subst_thms - substitution theorems
mpespc_opt - premise patterns and elimination tactic
attrbs - rule attributes to be applied *)
type ctxt_def_type =
{
rispec : (indexname * term) list,
sbtspec : (term * term) list,
sbrr_opt : (Facts.ref * Token.src list) option,
subst_thms : (Facts.ref * Token.src list) list,
mpespc_opt : (term list * (Proof.context -> tactic)) option,
attrbs : Token.src list
};
(* Raw arguments of tts_context, exactly as produced by the parser. *)
type amend_ctxt_data_input_type =
(
(
(
((string * string) list * (string * string) list) *
(Facts.ref * Token.src list) option
) * (Facts.ref * Token.src list) list
) * (string list * (Method.text * (Position.T * Position.T))) option
) * Token.src list;
val init_ctxt_def_type =
{
rispec = [],
sbtspec = [],
sbrr_opt = NONE,
subst_thms = [],
mpespc_opt = NONE,
attrbs = []
};
structure TTSContextData = Proof_Data
(
type T = ctxt_def_type;
fun init _ = init_ctxt_def_type;
);
fun update_tts_ctxt_data value = TTSContextData.map (K value);
fun get_tts_ctxt_data ctxt = TTSContextData.get ctxt;
(* The context data is empty iff every field holds its initial value;
used to detect nested tts contexts. *)
fun is_empty_tts_ctxt_data (ctxt_data : ctxt_def_type) =
ctxt_data |> #attrbs |> null
andalso ctxt_data |> #mpespc_opt |> is_none
andalso ctxt_data |> #rispec |> null
andalso ctxt_data |> #sbrr_opt |> is_none
andalso ctxt_data |> #subst_thms |> null
andalso ctxt_data |> #sbtspec |> null;
(* Convert the raw "eliminating ... through ..." section into premise
patterns plus a ready-to-run elimination tactic. *)
fun mk_mpespc_opt ctxt mpespc_opt_raw =
let
fun mk_mpespc_opt_impl ctxt mpespc_raw =
let
fun prem_red_tac ctxt =
(Method.evaluate ((#2 #> #1) mpespc_raw) ctxt) []
|> Context_Tactic.NO_CONTEXT_TACTIC ctxt
val prems = mpespc_raw
|> #1
|> map (Proof_Context.read_term_pattern ctxt)
in (prems, prem_red_tac) end;
in Option.map (mk_mpespc_opt_impl ctxt) mpespc_opt_raw end;
(* Destructure the raw tts_context arguments (see
   amend_ctxt_data_input_type) into their six components. *)
fun unpack_amend_context_data_args
  (
    (
      (
        ((rispec_raw, sbtspec_raw), sbrr_opt_raw),
        subst_thms_raw
      ),
      mpespc_opt_raw
    ),
    attrbs_raw
  ) =
  (
    rispec_raw,
    sbtspec_raw,
    sbrr_opt_raw,
    subst_thms_raw,
    mpespc_opt_raw,
    attrbs_raw
  );
(* Printers for the individual fields of the tts context data; the output
follows the ML_Syntax conventions for pairs/lists/options. *)
fun string_of_rispec ctxt = ML_Syntax.print_pair
Term.string_of_vname (Syntax.string_of_term ctxt);
fun string_of_term_pair ctxt =
let val string_of_term = Syntax.string_of_term ctxt
in ML_Syntax.print_pair string_of_term string_of_term end;
val string_of_sbrr_opt =
(ML_Syntax.print_pair Facts.string_of_ref string_of_token_src_list)
|> ML_Syntax.print_option;
val string_of_subst_thms =
ML_Syntax.print_pair Facts.string_of_ref string_of_token_src_list
|> ML_Syntax.print_list;
(* Tactic values cannot be printed; only the premise patterns are shown. *)
fun string_of_mpespc_opt ctxt =
let
val tac_c = K "unknown tactic"
val term_cs = (ML_Syntax.print_list (Syntax.string_of_term ctxt))
in ML_Syntax.print_pair term_cs tac_c |> ML_Syntax.print_option end;
(* Diagnostic output of the raw tts_context arguments, one labelled line
per field. *)
fun string_of_amend_context_data_args ctxt args =
let
val
(
rispec_raw,
sbtspec_raw,
sbrr_opt_raw,
subst_thms_raw,
mpespc_opt_raw,
attrbs_raw
) = unpack_amend_context_data_args args
val rispec_c = rispec_raw
|> map (ML_Syntax.print_pair I I)
|> String.concatWith ", "
|> curry op^ "rispec: "
val sbtspec_c = sbtspec_raw
|> map (ML_Syntax.print_pair I I)
|> String.concatWith ", "
|> curry op^ "sbtspec: "
val sbrr_opt_c = sbrr_opt_raw
|> string_of_sbrr_opt
|> curry op^ "sbrr_opt: "
val subst_thms_c = subst_thms_raw
|> string_of_subst_thms
|> curry op^ "subst_thms: "
val mpespc_opt_c = mpespc_opt_raw
|> mk_mpespc_opt ctxt
|> string_of_mpespc_opt ctxt
|> curry op^ "mpespc_opt: "
val attrbs_c = attrbs_raw
|> string_of_token_src_list
|> curry op^ "attrbs: "
val out_c =
[
rispec_c,
sbtspec_c,
sbrr_opt_c,
subst_thms_c,
mpespc_opt_c,
attrbs_c
]
|> String.concatWith "\n"
in out_c end;
(* Diagnostic output of the processed tts context data, one labelled line
per field (mirrors string_of_amend_context_data_args). *)
fun string_of_tts_ctxt_data ctxt ctxt_data =
let
val rispec_c = ctxt_data
|> #rispec
|> map (string_of_rispec ctxt)
|> String.concatWith ", "
|> curry op^ "rispec: "
val sbtspec_c = ctxt_data
|> #sbtspec
|> map (string_of_term_pair ctxt)
|> String.concatWith ", "
|> curry op^ "sbtspec: "
val sbrr_opt_c = ctxt_data
|> #sbrr_opt
|> string_of_sbrr_opt
|> curry op^ "sbrr_opt: "
val subst_thms_c = ctxt_data
|> #subst_thms
|> string_of_subst_thms
|> curry op^ "subst_thms: "
val mpespc_opt_c = ctxt_data
|> #mpespc_opt
|> string_of_mpespc_opt ctxt
|> curry op^ "mpespc_opt: "
val attrbs_c = ctxt_data
|> #attrbs
|> string_of_token_src_list
|> curry op^ "attrbs: "
val out_c =
[
rispec_c,
sbtspec_c,
sbrr_opt_c,
subst_thms_c,
mpespc_opt_c,
attrbs_c
]
|> String.concatWith "\n"
in out_c end;
fun mk_msg_tts_ctxt_error msg = "tts_context: " ^ msg;
(* Validate the RI specification: it must be non-empty, its schematic type
variables must be distinct and its risset must pass risset_input. *)
fun rispec_input ctxt rispec =
let
val msg_rispec_empty =
mk_msg_tts_ctxt_error "rispec must not be empty"
val msg_risstv_not_distinct =
mk_msg_tts_ctxt_error "risstvs must be distinct"
val risstv = map #1 rispec
val risset = map #2 rispec
val _ = rispec |> List.null |> not
orelse error msg_rispec_empty
val _ = risstv |> has_duplicates op= |> not
orelse error msg_risstv_not_distinct
val _ = ETTS_RI.risset_input ctxt "tts_context" risset
in () end;
local
(* Match two types structurally and collect the induced map from schematic
type variables (in T) to free type variables (in U); raises TYPE if the
types do not align as TVar-vs-TFree under equal constructors. *)
fun tv_of_ix (T, U) =
let
fun tv_of_ix ((TVar v), (TFree x)) = [(v, x)]
| tv_of_ix ((Type (c, Ts)), (Type (d, Us))) =
if length Ts = length Us andalso c = d
then (Ts ~~ Us) |> map tv_of_ix |> flat
else raise TYPE ("tv_of_ix", [Type (c, Ts), Type (d, Us)], [])
| tv_of_ix (T, U) = raise TYPE ("tv_of_ix", [T, U], [])
in tv_of_ix (T, U) |> distinct op= end
(* a relation given as pairs is a function / a bijection *)
fun is_fun xs = xs |> map fst |> has_duplicates op= |> not
fun is_bij xs = is_fun xs andalso xs |> map snd |> has_duplicates op= |> not
in
(* Validate the sbterm substitution specification (tbt ~> sbt pairs)
against the RI specification; every check reports a dedicated error. *)
fun sbtspec_input ctxt rispec sbtspec =
let
val msg_tbts_not_stvs = mk_msg_tts_ctxt_error
"the type variables that occur in the tbts must be schematic"
val msg_tbts_distinct_sorts = mk_msg_tts_ctxt_error
"tbts: a single stv should not have two distinct sorts associated with it"
val msg_not_type_instance = mk_msg_tts_ctxt_error
"\n\t-the types of the sbts must be equivalent " ^
"to the types of the tbts up to renaming of the type variables\n" ^
"\t-to each type variable that occurs among the tbts must correspond " ^
"exactly one type variable among all type " ^
"variables that occur among all of the sbts"
val msg_tbts_not_cv = mk_msg_tts_ctxt_error
"tbts must consist of constants and schematic variables"
val msg_tbts_not_distinct = mk_msg_tts_ctxt_error "tbts must be distinct"
val msg_tbts_not_sbt_data_key = mk_msg_tts_ctxt_error
"sbts must be registered using the command tts_register_sbts"
val msg_sbterms_subset_rispec = mk_msg_tts_ctxt_error
"the collection of the (stv, ftv) pairs associated with the sbterms " ^
"must form a subset of the collection of the (stv, ftv) pairs " ^
"associated with the RI specification, provided that only the pairs " ^
"(stv, ftv) associated with the sbterms such that ftv occurs in a " ^
"premise of a theorem associated with an sbterm are taken into account"
val tbts = map #1 sbtspec
val sbts = map #2 sbtspec
(* tbts may contain no free type variables *)
val _ = (tbts |> map (fn t => Term.add_tfrees t []) |> flat |> null)
orelse error msg_tbts_not_stvs
(* each stv of the tbts carries a unique sort *)
val _ = tbts
|> map (fn t => Term.add_tvars t [])
|> flat
|> distinct op=
|> is_fun
orelse error msg_tbts_distinct_sorts
(* stv ~> ftv correspondence induced by matching the types *)
val tbts_sbts_Ts = map type_of tbts ~~ map type_of sbts
|> map tv_of_ix
|> flat
|> distinct op=
val _ = tbts_sbts_Ts
|> is_bij
orelse error msg_not_type_instance
handle TYPE ("tv_of_ix", _, _) => error msg_not_type_instance
val _ = tbts |> map Term.is_cv |> List.all I
orelse error msg_tbts_not_cv
val _ = tbts |> has_duplicates (op aconv) |> not
orelse error msg_tbts_not_distinct
(* every sbt must be registered via tts_register_sbts *)
val _ = sbts
|> map (Thm.cterm_of ctxt #> apdupl (K ctxt))
|> map (uncurry ETTS_Substitution.is_sbt_data_key)
|> List.all I
orelse error msg_tbts_not_sbt_data_key
(* free type variables occurring in premises of the theorems registered
for the sbts *)
val sbt_ftvs = sbts
|> map (Thm.cterm_of ctxt)
|> map (ETTS_Substitution.sbt_data_of ctxt)
|> filter is_some
|> map the
|> map Thm.prems_of
|> flat
|> map (fn t => Term.add_tfrees t [])
|> flat
val tbts_sbts_Ts' = tbts_sbts_Ts
|> filter (fn (_, ftv) => ftv |> member op= sbt_ftvs)
|> map (apfst fst)
val rispec_ftvs_Ts =
map (apsnd (fn t => t |> type_of |> HOLogic.dest_SetTFree)) rispec
val _ = subset op= (tbts_sbts_Ts', rispec_ftvs_Ts)
orelse error msg_sbterms_subset_rispec
in () end;
end;
(* The remaining raw sections are validated by evaluating them once. *)
fun sbrr_opt_raw_input ctxt (SOME sbrr_raw) =
let val _ = Attrib.eval_thms ctxt (single sbrr_raw) in () end
| sbrr_opt_raw_input _ NONE = ();
fun subst_thms_input ctxt subst_thms_raw =
let val _ = Attrib.eval_thms ctxt subst_thms_raw
in () end;
(* Only the attributes listed in rule_attrb are admissible. *)
fun attrbs_input attrbs =
let
val msg_rule_attrbs = mk_msg_tts_ctxt_error
"attrbs: only " ^ String.concatWith ", " rule_attrb ^ " are allowed"
val _ = attrbs
|> map (map Token.unparse #> hd)
|> map is_rule_attrb
|> List.all I
orelse error msg_rule_attrbs
in () end;
(* tts contexts must not be nested: the current data must be empty. *)
fun tts_context_input (ctxt_data : ctxt_def_type) =
let val msg_nested_tts_context = mk_msg_tts_ctxt_error "nested tts contexts"
in is_empty_tts_ctxt_data ctxt_data orelse error msg_nested_tts_context end;
local
(*Parser for the mandatory "tts" section: an and-separated list of
entries of the form (?'a to t), mapping a type variable to a term.*)
val parse_tts =
let
val parse_tts_title = (\<^keyword>‹tts› -- kw_col);
val parse_tts_entry =
(kw_bo |-- Parse.type_var -- (\<^keyword>‹to› |-- Parse.term --| kw_bc));
in parse_tts_title |-- Parse.and_list parse_tts_entry end;
(*Parser for the optional "sbterms" section: entries of the form
(t to u), mapping a type-based term to a set-based term.*)
val parse_sbterms =
let
val parse_sbterms_title = (\<^keyword>‹sbterms› -- kw_col);
val parse_sbterms_entry =
(kw_bo |-- Parse.term -- (\<^keyword>‹to› |-- Parse.term --| kw_bc));
in parse_sbterms_title |-- Parse.and_list parse_sbterms_entry end;
(*"rewriting thm": a single rewrite-rule reference.*)
val parse_rewriting = (\<^keyword>‹rewriting› |-- Parse.thm);
(*"substituting thm and ...": theorems used to discharge premises.*)
val parse_substituting = (\<^keyword>‹substituting› |-- Parse.and_list Parse.thm);
(*"eliminating pat and ... through method": optional patterns plus the
method used for the elimination of premises.*)
val parse_eliminating =
let
val parse_eliminating_pattern = Parse.and_list Parse.term;
val parse_eliminating_method = (\<^keyword>‹through› |-- Method.parse);
in
(
\<^keyword>‹eliminating› |--
(
Scan.optional (parse_eliminating_pattern) [] --
parse_eliminating_method
)
)
end;
(*"applying [attrs]": attributes applied to the resulting theorem.*)
val parse_applying = (\<^keyword>‹applying› |-- Parse.attribs);
in
(*Combined parser for the arguments of the tts_context command; every
section except "tts" is optional.*)
val parse_tts_context =
parse_tts --
Scan.optional parse_sbterms [] --
Scan.option parse_rewriting --
Scan.optional parse_substituting [] --
Scan.option parse_eliminating --
Scan.optional parse_applying [];
end;
local
(*Convert the raw RI specification: the type variable is parsed against
the background theory context ctxt' (Syntax.parse_typ), while the risset
term is read in the current context ctxt.*)
fun mk_rispec ctxt rispec_raw =
let val ctxt' = Proof_Context.init_global (Proof_Context.theory_of ctxt)
in
rispec_raw
|> map (ctxt' |> Syntax.parse_typ #> dest_TVar #> #1 |> apfst)
|> map (ctxt |> Syntax.read_term |> apsnd)
end;
(*Convert the raw sbterm specification: the type-based term is read as a
term pattern in the background theory context, the set-based term in the
current context.*)
fun mk_sbtspec ctxt sbtspec_raw =
let val ctxt' = Proof_Context.init_global (Proof_Context.theory_of ctxt)
in
sbtspec_raw
|> map (ctxt' |> Proof_Context.read_term_pattern |> apfst)
|> map (ctxt |> Syntax.read_term |> apsnd)
end;
in
(*Parse, validate and register the tts context data in the context.
Fails if a tts context is already active or if any individual input is
ill-formed (each *_input check raises an error on failure).*)
fun amend_context_data args ctxt =
let
val _ = ctxt |> get_tts_ctxt_data |> tts_context_input
val
(
rispec_raw,
sbtspec_raw,
sbrr_opt_raw,
subst_thms_raw,
mpespc_opt_raw,
attrbs_raw
) = unpack_amend_context_data_args args
val rispec = mk_rispec ctxt rispec_raw
val sbtspec = mk_sbtspec ctxt sbtspec_raw
val mpespc_opt = mk_mpespc_opt ctxt mpespc_opt_raw
(*input validation: performed for side effects only*)
val _ = rispec_input ctxt rispec
val _ = sbtspec_input ctxt rispec sbtspec
val _ = sbrr_opt_raw_input ctxt sbrr_opt_raw
val _ = subst_thms_input ctxt subst_thms_raw
val _ = attrbs_input attrbs_raw
val ctxt_def : ctxt_def_type =
{
rispec = rispec,
sbtspec = sbtspec,
subst_thms = subst_thms_raw,
sbrr_opt = sbrr_opt_raw,
mpespc_opt = mpespc_opt,
attrbs = attrbs_raw
}
in (ctxt_def, update_tts_ctxt_data ctxt_def ctxt) end;
end;
(*Enter a tts context target: convert the generic theory to a local
theory, register the tts context data, then open a nested target.*)
fun tts_gen_context args gthy =
  let
    val lthy = Context.cases Named_Target.theory_init Local_Theory.assert gthy
    val lthy' = snd (amend_context_data args lthy)
  in snd (Local_Theory.begin_nested lthy') end;
(*Toplevel transition that enters a tts context target.*)
fun process_tts_context (args: amend_ctxt_data_input_type) =
Toplevel.begin_nested_target (tts_gen_context args);
(*Register the Isar command "tts_context ... begin".*)
val _ = Outer_Syntax.command
\<^command_keyword>‹tts_context›
"context for the relativization of facts"
((parse_tts_context >> process_tts_context) --| Parse.begin);
end;File ‹ETTS_Algorithm.ML›
signature ETTS_ALGORITHM =
sig
(*construction/destruction of the existential statement used for the
local typedef associated with a relativization isomorphism*)
val mk_local_typedef_ex : (string * sort) * term -> term
val dest_local_typedef_ex : term -> typ * term
(*output modes of the algorithm, selected by "", "!" or "?"*)
datatype etts_output_type = default | verbose | active
val etts_output_type_of_string : string -> etts_output_type
val string_of_etts_output_type : etts_output_type -> string
val is_verbose : etts_output_type -> bool
val is_active : etts_output_type -> bool
val is_default : etts_output_type -> bool
(*relativization of a single theorem; the int list is the writer state*)
val etts_algorithm :
Proof.context ->
etts_output_type ->
int list ->
(indexname * term) list ->
(term * term) list ->
(Facts.ref * Token.src list) option ->
(Facts.ref * Token.src list) list ->
(term list * (Proof.context -> tactic)) option ->
Token.src list ->
thm ->
(thm * int list) * Proof.context
(*relativization of a list of theorems with the same parameters*)
val etts_fact :
Proof.context ->
etts_output_type ->
int list ->
(indexname * term) list ->
(term * term) list ->
(Facts.ref * Token.src list) option ->
(Facts.ref * Token.src list) list ->
(term list * (Proof.context -> tactic)) option ->
Token.src list ->
thm list ->
(thm list * int list) * Proof.context
end;
structure ETTS_Algorithm : ETTS_ALGORITHM =
struct
open UD_With;
open ETTS_Utilities;
open ETTS_RI;
open ETTS_Substitution;
(*Construct the existential statement asserting a local type definition
("EX rep abs. type_definition-like predicate rep abs rissett",
presumably — the predicate is built by HOLogic.mk_type_definition_pred)
for the type variable rcd_spec and the set term rissett.*)
fun mk_local_typedef_ex (rcd_spec, rissett) =
let
val T = TFree rcd_spec
(*element type of rissett, recovered from the (assumed unique) type
variable of its type; the_single fails if there is not exactly one*)
val risset_ftv = rissett
|> type_of
|> (fn T => Term.add_tfreesT T [])
|> the_single
|> TFree
in
HOLogic.mk_exists
(
"rep",
T --> risset_ftv,
HOLogic.mk_exists
(
"abs",
risset_ftv --> T,
(*Bound 1 = rep, Bound 0 = abs*)
HOLogic.mk_type_definition_pred T risset_ftv $ Bound 1 $ Bound 0 $ rissett
)
)
end;
(*Inverse of mk_local_typedef_ex: from a doubly existential statement
return the domain type of "rep" and the set term. Raises
TERM "dest_local_typedef_ex" when the term does not have the expected
double-existential shape.*)
fun dest_local_typedef_ex t =
let
val (_, T', t') = HOLogic.dest_exists t
handle TERM ("dest_exists", _) =>
raise TERM ("dest_local_typedef_ex", single t)
val (_, _, t'') = HOLogic.dest_exists t'
handle TERM ("dest_exists", _) =>
raise TERM ("dest_local_typedef_ex", single t)
(*T' is the type of "rep"; its argument type is the defined type*)
val (T''', _) = dest_funT T'
val t''' = t'' |> HOLogic.dest_type_definition |> #3
in (T''', t''') end;
(*Output modes of the relativization algorithm, selected by the command
suffix: "" (default), "!" (verbose) and "?" (active).*)
datatype etts_output_type = default | verbose | active;
fun etts_output_type_of_string "" = default
| etts_output_type_of_string "!" = verbose
| etts_output_type_of_string "?" = active
| etts_output_type_of_string _ =
error "etts_output_type_of_string: invalid input";
(*NB: in the patterns below, default/verbose/active are the constructors
of etts_output_type, not variable bindings, so each clause matches only
the corresponding mode.*)
fun string_of_etts_output_type default = "default"
| string_of_etts_output_type verbose = "verbose"
| string_of_etts_output_type active = "active";
fun is_verbose verbose = true
| is_verbose _ = false;
fun is_active active = true
| is_active _ = false;
fun is_default default = true
| is_default _ = false;
(*In verbose mode, record the start of action c in the writer; otherwise
return the writer unchanged.*)
fun verbose_writer_prem etts_output_type writer c =
  if is_verbose etts_output_type
  then writer |> ETTS_Writer.write_action c
  else writer
(*In verbose mode, print every theorem in thms; otherwise do nothing.
Always yields a unit list (a singleton in the silent case).*)
fun verbose_writer_concl_thms etts_output_type ctxt thms =
  if is_verbose etts_output_type
  then map (fn thm => thm |> Thm.string_of_thm ctxt |> writeln) thms
  else [()];
(*In verbose mode, print every type in Ts; otherwise do nothing.
Always yields a unit list (a singleton in the silent case).*)
fun verbose_writer_concl_types etts_output_type ctxt Ts =
  if is_verbose etts_output_type
  then map (fn T => T |> Syntax.string_of_typ ctxt |> writeln) Ts
  else [()];
(*Apply Local_Typedef.cancel_type_definition n times to thm. After each
cancellation the premises are rotated by 1 (so the next cancellation
presumably acts on the next local typedef premise); the final rotation
by ~n undoes the accumulated rotation.*)
fun cancel_type_definition_repeat n thm =
let
fun apply_cancel_type_definition 0 thm = thm
| apply_cancel_type_definition n thm = thm
|> Local_Typedef.cancel_type_definition
|> rotate_prems 1
|> apply_cancel_type_definition (n - 1)
in
thm
|> apply_cancel_type_definition n
|> rotate_prems (~n)
end;
local
(*theorems instantiated to obtain the Domainp-style transfer rules for
the rissets*)
val risset_tthms =
[@{thm type_definition_Domainp}, @{thm type_definition_Domainp'}];
(*theorems instantiated to obtain the side-condition transfer rules*)
val sc_tthms =
[
@{thm typedef_bi_unique},
@{thm typedef_right_total},
@{thm typedef_left_unique},
@{thm typedef_right_unique}
];
(*From a unary relativization isomorphism rit return the pair
(argument type, result type); the_single fails if rit is not unary.*)
fun get_riT rit = rit
|> type_of
|> (fn T => (T |> binder_types |> the_single, body_type T));
(*Right-hand side of the definition of the transfer relation associated
with the representation morphism rept : isoT -> domT, i.e. the term
"%r a. r = rept a" (Bound 1 = r, Bound 0 = a).*)
fun mk_cr_rhst rept =
let
val (isoT, domT) = get_riT rept
val rhst =
Abs
(
"r",
domT,
Abs
(
"a",
isoT,
Const (\<^const_name>‹HOL.eq›, domT --> domT --> HOLogic.boolT) $
Bound 1 $
(rept $ Bound 0)
)
)
in rhst end;
(*Number of local typedefs to introduce: one per RI specification entry.*)
fun etts_rlt_ctxt_intialize rispec = List.length rispec;
(*Fix fresh names for all free variables of the risset terms in rispec
and declare the corresponding terms in the context.*)
fun etts_rlt_ctxt_mk_fresh_ris ctxt rispec = rispec
|> map #2
|> map (fn t => Term.add_frees t [])
|> flat
|> dup
||> map #1
||> Variable.fix_new_vars ctxt
|>> map Free
|-> fold_rev Variable.declare_term;
(*Invent one fresh type variable of sort HOL.type per RI specification
entry and pair it with the entry; logs the fresh types in verbose mode.*)
fun etts_rlt_ctxt_mk_fresh_risstv ctxt etts_output_type writer nds rispec =
let
val writer' = verbose_writer_prem
etts_output_type writer "types associated with the RIs..."
val (rispec', ctxt') = ctxt
|> (\<^sort>‹HOL.type› |> replicate nds |> Variable.invent_types)
|>> curry (swap #> op~~) rispec
val _ = verbose_writer_concl_types
etts_output_type ctxt' (map (#1 #> TFree) rispec')
in ((writer', rispec'), ctxt') end;
(*Add the local-typedef existential statements (one per entry of rispec)
as assumptions to the context; logs the assumptions in verbose mode.*)
fun etts_rlt_ctxt_mk_ltd_assms ctxt etts_output_type writer rispec =
let
val writer' = verbose_writer_prem
etts_output_type writer "assumptions for the local typedef..."
val (ltd_assms, ctxt') = rispec
|>
(
apsnd #2
#> mk_local_typedef_ex
#> HOLogic.mk_Trueprop
#> Thm.cterm_of ctxt
|> map
)
|> (fn ltdts => Assumption.add_assumes ltdts ctxt)
val _ = verbose_writer_concl_thms etts_output_type ctxt' ltd_assms
in ((writer', ltd_assms), ctxt') end;
local
(*Statement "EX cr. cr = (%r a. r = rept a)" asserting the existence of
the transfer relation associated with rept.*)
fun mk_ex_crt rept =
let
val (isoT, domainT) = get_riT rept
val crT = domainT --> isoT --> HOLogic.boolT
val rhst = mk_cr_rhst rept
val t = HOLogic.mk_exists
(
"cr",
crT,
Const (\<^const_name>‹HOL.eq›, crT --> crT --> HOLogic.boolT) $
Bound 0 $
rhst
)
in t end;
in
(*Obtain the rep/abs morphisms from the local-typedef assumptions, prove
existence of the associated transfer relations, obtain them, and derive
the (meta-equality) definitions of the transfer relations.*)
fun etts_rlt_ctxt_mk_crs ctxt etts_output_type writer nds ltd_assms =
let
val writer' = verbose_writer_prem etts_output_type writer "crs..."
(*eliminate the existentials to obtain rep/abs variables and facts*)
val ((ra_var_specs, ra_thms), ctxt') = ctxt
|> Obtain.result
(K (REPEAT (eresolve_tac ctxt (single @{thm exE}) 1))) ltd_assms
(*the first nds obtained variables are the representation morphisms*)
val repts = ra_var_specs
|> map (#2 #> Thm.term_of)
|> chop nds
|> #1
val ex_cr_thms =
let
val hol_ex_cr_tac = resolve_tac ctxt' (single @{thm ex_eq}) 1
fun hol_cr_prover thm =
Goal.prove ctxt' [] [] thm (K (hol_ex_cr_tac))
in map (mk_ex_crt #> HOLogic.mk_Trueprop #> hol_cr_prover) repts end
val ((crts, hol_cr_thms), ctxt'') = ctxt'
|> Obtain.result
(K (REPEAT (eresolve_tac ctxt' (single @{thm exE}) 1))) ex_cr_thms
|>> (fn x => x |>> map #2 |>> map Thm.term_of)
(*convert the HOL equalities into Pure equalities via atomization*)
val pure_cr_thms =
let
val pure_crts = map Logic.mk_equals (crts ~~ (map mk_cr_rhst repts))
fun pure_cr_tac thm _ =
Object_Logic.full_atomize_tac ctxt'' 1
THEN resolve_tac ctxt'' (single thm) 1
fun pure_cr_prover (goal, tac_thm) =
Goal.prove ctxt'' [] [] goal (pure_cr_tac tac_thm)
in map pure_cr_prover (pure_crts ~~ hol_cr_thms) end
val _ = verbose_writer_concl_thms etts_output_type ctxt'' pure_cr_thms
in ((writer', ra_thms, crts, pure_cr_thms), ctxt'') end;
end;
(*Derive the main transfer rules by instantiating risset_tthms and
sc_tthms (via OF) with every pair of a rep/abs fact and the matching
transfer-relation definition; logs the results in verbose mode.*)
fun etts_rlt_ctxt_mk_ri_tr ctxt etts_output_type writer ra_thms pure_cr_thms =
let
val writer' =
verbose_writer_prem etts_output_type writer "main transfer rules..."
val (risset_transfer_thms, sc_transfer_thms) =
let
val OFthms = map list_of_pair (ra_thms ~~ pure_cr_thms)
val apply_OFthms =
map (fn thm => map ((curry op OF) thm) OFthms) #> flat
in (risset_tthms, sc_tthms) |>> apply_OFthms ||> apply_OFthms end
val _ = verbose_writer_concl_thms
etts_output_type ctxt (risset_transfer_thms @ sc_transfer_thms)
in (writer', risset_transfer_thms, sc_transfer_thms) end;
local
(*Regroup the main transfer rules per risset: pair each Domainp-style
rule with the corresponding side-condition rules (nds rules per group;
NOTE(review): the exact grouping depends on the order produced by
etts_rlt_ctxt_mk_ri_tr — verify against that function when changing
either).*)
fun get_sc_ex_rissets risset_transfer_thms sc_transfer_thms =
let val nds = (length risset_transfer_thms) div (length risset_tthms)
in
(risset_transfer_thms, sc_transfer_thms)
|>> take nds
||> chop nds
||> (nds |> chop #> #1 |> apsnd)
||> op ~~
|> op ~~
end;
in
(*Derive the transfer rules for the sbterms: for each sbterm pair, look
up its registered theorem (sbt_data_of), discharge its premises with the
side-condition theorems associated with the relevant fixed type
variables, obtain any existentials, and convert the results into
transfer rules.*)
fun etts_rlt_ctxt_mk_sbt_tr
ctxt
etts_output_type
writer
risset_transfer_thms
sc_transfer_thms
rispec
sbtspec =
let
val writer' = verbose_writer_prem
etts_output_type writer "transfer rules for the sbts..."
val ((sbtspec_specs, pp_thms), ctxt') =
let
val sc_ex_rissets = get_sc_ex_rissets risset_transfer_thms sc_transfer_thms
(*lookup from the fixed type variable of a risset to its associated
side-condition theorems*)
val scthms_of_ftv =
let
val scthms_ftv =
(
map (#1 #> #2 #> #2 #> type_of #> dest_rissetT) rispec ~~
map reroute_sp_triple sc_ex_rissets
)
in AList.lookup op= scthms_ftv end
(*fixed type variables occurring in the premises of thm*)
fun thm_prem_ftvs thm = thm
|> Thm.prems_of
|> map (fn t => Term.add_tfrees t [])
|> flat
|> distinct op=
(*type variables of the sbterm that also occur in the premises*)
fun get_sc_ftv_specs (thm_ftv_specs, rvt_ftv_specs) = rvt_ftv_specs
|> subtract op= (rvt_ftv_specs |> subtract op= thm_ftv_specs)
(*Obtain.result fails on an empty fact list, hence the case split*)
fun obtain_prs ctxt ex_pr_thms = case ex_pr_thms of
[] => (([], []), ctxt)
| _ => Obtain.result
(K (REPEAT (eresolve_tac ctxt (single @{thm exE}) 1)))
ex_pr_thms
ctxt
in
sbtspec
|>
(
(Thm.cterm_of ctxt #> (sbt_data_of ctxt #> the) |> apdupl)
#> swap
|> apsnd
#> reroute_sp_ps
|> map
)
|> map (reroute_ps_sp #> apsnd swap)
|>
(
(fn (thm, t) => (thm, (thm, t)))
#>
(
(apfst thm_prem_ftvs)
#> (type_of #> (fn t => Term.add_tfreesT t []) |> apsnd)
#> get_sc_ftv_specs
#>
(
Option.compose (list_of_triple, scthms_of_ftv)
#>
(
fn xs_opt => case xs_opt of
SOME xs_opt => xs_opt
| NONE => []
)
|> map
#> flat
)
|> apsnd
)
#> op OF
|> apsnd
|> map
)
|> split_list
||> obtain_prs ctxt
|> reroute_sp_ps
|>> reroute_sp_ps
|>> apfst op~~
|>> (#2 |> apsnd |> map |> apfst)
|>> apsnd Transfer.mk_transfer_rels
end
val _ = verbose_writer_concl_thms etts_output_type ctxt' pp_thms
in ((writer', pp_thms, sbtspec_specs), ctxt') end;
end;
(*Collect all transfer rules relevant for the relativization step into a
single list (order preserved).*)
fun etts_rlt_ctxt_mk_transfer risset_transfer_thms sc_transfer_thms pp_thms =
  List.concat [risset_transfer_thms, sc_transfer_thms, pp_thms];
(*Project each extended RI specification entry back to the pair
(schematic type variable index, fresh type-variable spec), dropping the
associated transfer relation.*)
fun etts_rlt_ctxt_mk_rispec rispec =
  map (fn (spec, _) => spec |> swap |> apfst #1) rispec;
(*Split the sbterm specification entries into those whose type-based term
is a schematic variable and those whose type-based term is a constant,
destructuring the term in each case. Entries of any other shape are
dropped.*)
fun etts_rlt_ctxt_mk_sbtspec sbtspec_specs =
  let
    val sbtspec_var_specs = sbtspec_specs
      |> filter (fn (t, _) => is_Var t)
      |> map (fn (t, x) => (dest_Var t, x))
    val sbtspec_const_specs = sbtspec_specs
      |> filter (fn (t, _) => is_Const t)
      |> map (fn (t, x) => (dest_Const t, x))
  in (sbtspec_var_specs, sbtspec_const_specs) end;
in
(*Initialize the relativization context: fix fresh variables and type
variables for the rissets, assume the local typedefs, obtain the
rep/abs morphisms and transfer relations, and derive all transfer rules.
Returns the original context, the extended context, the writer state,
the refined RI specification, the split sbterm specifications, and the
combined transfer rules.*)
fun init_rlt_ctxt ctxt etts_output_type writer rispec sbtspec =
let
val nds = etts_rlt_ctxt_intialize rispec
val ctxt' = etts_rlt_ctxt_mk_fresh_ris ctxt rispec
val ((writer', rispec'), ctxt'') = etts_rlt_ctxt_mk_fresh_risstv
ctxt' etts_output_type writer nds rispec
val ((writer'', ltd_assms), ctxt''') = etts_rlt_ctxt_mk_ltd_assms
ctxt'' etts_output_type writer' rispec'
val ((writer''', ra_thms, crts, pure_cr_thms), ctxt'''') =
etts_rlt_ctxt_mk_crs ctxt''' etts_output_type writer'' nds ltd_assms
(*attach the obtained transfer relation to each RI entry*)
val rispec'' = rispec' ~~ crts
val (writer'''', risset_transfer_thms, sc_transfer_thms) = etts_rlt_ctxt_mk_ri_tr
ctxt'''' etts_output_type writer''' ra_thms pure_cr_thms
val ((writer''''', pp_thms, sbtspec_specs), ctxt''''') =
etts_rlt_ctxt_mk_sbt_tr
ctxt''''
etts_output_type
writer''''
risset_transfer_thms
sc_transfer_thms
rispec''
sbtspec
val transfer_thms = etts_rlt_ctxt_mk_transfer
risset_transfer_thms sc_transfer_thms pp_thms
val rispec''' = etts_rlt_ctxt_mk_rispec rispec''
val (sbtspec_var_specs, sbtspec_const_specs) =
etts_rlt_ctxt_mk_sbtspec sbtspec_specs
in
(
ctxt,
ctxt''''',
writer''''',
rispec''',
sbtspec_var_specs,
sbtspec_const_specs,
transfer_thms
)
end;
end;
local
(*Rename the schematic type variables of thm that are not targeted by the
RI specification to fresh names (avoiding the names used by rispec and
the context), and update the sbterm specifications accordingly so that
their types still agree with the renamed theorem.*)
fun etts_algorithm_fresh_stv
ctxt
writer
rispec
sbtspec_var_specs
sbtspec_const_specs
thm =
let
val stvs = thm |> Thm.full_prop_of |> (fn t => Term.add_tvars t [])
(*restrict rispec to type variables actually occurring in thm and
attach each variable's sort from the theorem*)
val rispec' = rispec
|> filter (fn (v, _) => member op= (map fst stvs) v)
|> map (apfst (apdupr ((AList.lookup op= stvs #> the))))
(*schematic type variables of thm not covered by rispec'*)
val thm_stvs =
let val cs = rispec' |> map fst |> map fst |> map fst
in stvs |> filter (fn (v, _) => fst v |> member op= cs |> not) end
(*fresh names for the uncovered type variables*)
val cs =
let
fun folder c (cs, nctxt) =
let val out = Name.variant c nctxt
in (fst out::cs, snd out) end
val cs = rispec' |> map snd |> map fst
val nctxt = fold Name.declare cs (Variable.names_of ctxt)
in fold folder (thm_stvs |> map fst |> map fst) ([], nctxt) |> fst end
val rhsTs = cs ~~ map (reroute_ps_sp #> snd) thm_stvs
|> map reroute_sp_ps
|> map TVar
val thm' =
let val rhs_cT = map (Thm.ctyp_of ctxt) rhsTs
in Drule.instantiate_normalize (thm_stvs ~~ rhs_cT, []) thm end
(*map an old (indexname, sort) to the fresh type variable, if renamed*)
fun thm_stvs_map (v, T) =
case AList.lookup op= (thm_stvs ~~ rhsTs) (v, T) of
SOME T => T
| NONE => TVar (v, T)
val sbtspec_var_specs = sbtspec_var_specs
|> map (fn ((v, T), x) => ((v, map_type_tvar thm_stvs_map T), x))
val sbtspec_const_specs = sbtspec_const_specs
|> map (fn ((c, T), x) => ((c, map_type_tvar thm_stvs_map T), x))
(*re-read the (renamed) schematic type variables and align the sorts
of the constant specifications with those of thm'*)
val thm_stvs = thm' |> Thm.full_prop_of |> (fn t => Term.add_tvars t [])
val thm_stvs_map = map_type_tvar
(fn (v, _) => TVar (v, (AList.lookup op= thm_stvs #> the) v))
val sbtspec_const_specs = sbtspec_const_specs
|> map (fn ((c, T), x) => ((c, thm_stvs_map T), x))
in ((writer, rispec', sbtspec_var_specs, sbtspec_const_specs), thm') end;
(*Unfold the "with"-definitions registered in UDWithData in thm, then
restrict the sbterm specifications to those whose variable/constant
still occurs in the unfolded theorem, and merge them into a single list
of (term, set-based term) pairs.*)
fun etts_algorithm_unfold_ud_with
ctxt''
etts_output_type
writer
sbtspec_var_specs
sbtspec_const_specs
thm =
let
val writer' = verbose_writer_prem etts_output_type writer "unfold ud_with..."
val ud_with_thms = ctxt''
|> UDWithData.get
|> map (Local_Defs.meta_rewrite_rule ctxt'')
val thm' = Local_Defs.unfold ctxt'' ud_with_thms thm
(*NB: despite the name, stvs collects the schematic *term* variables
of thm' (Term.add_vars), not type variables*)
val stvs = thm' |> Thm.full_prop_of |> (fn t => Term.add_vars t [])
val consts = thm' |> Thm.full_prop_of |> (fn t => Term.add_consts t [])
val sbtspec_var_specs = sbtspec_var_specs
|> filter (fn ((v, T), _) => member op= stvs (v, T))
val sbtspec_const_specs = sbtspec_const_specs
|> filter (fn (const, _) => member op= consts const)
val sbtspec_specs =
(
(map (apfst Var) sbtspec_var_specs) @
(map (apfst Const) sbtspec_const_specs)
)
val _ = verbose_writer_concl_thms etts_output_type ctxt'' (single thm')
in ((writer', sbtspec_specs), thm') end;
(*Apply Unoverload_Type.unoverload_type to the type variables of the RI
specification, then recover the induced renaming of type and term
variables by matching the original proposition (with the newly
introduced premises dropped) against the unoverloaded theorem, and apply
that renaming to rispec and the sbterm specifications.*)
fun etts_algorithm_unoverload_types
ctxt' etts_output_type writer rispec sbtspec_specs thm =
let
val writer' =
verbose_writer_prem etts_output_type writer "unoverload types..."
val thm' = Unoverload_Type.unoverload_type
(Context.Proof ctxt') (rispec |> map (#1 #> #1) |> rev) thm
val t = Thm.full_prop_of thm
val n = Logic.count_prems t
val out_t = Thm.full_prop_of thm'
val out_n = Logic.count_prems out_t
(*drop the out_n - n premises added by unoverloading so that out_t'
matches the shape of the original proposition*)
val out_prem_ts = out_t |> Logic.strip_imp_prems |> drop (out_n - n)
val out_t' = Logic.list_implies (out_prem_ts, Logic.strip_imp_concl out_t)
(*mapT/mapt map old type/term variables to their renamed counterparts*)
val (mapT, mapt) = (Thm.cterm_of ctxt' out_t', Thm.cprop_of thm)
|> Thm.match
|>> map (apfst TVar)
||> map (apfst Var)
|>> map (apsnd Thm.typ_of)
||> map (apsnd Thm.term_of)
|>> map swap
||> map swap
val rispec' = rispec
|> map (apfst TVar)
|> map (apfst (map_atyps (AList.lookup op= mapT #> the)))
|> map (apfst dest_TVar)
val sbtspec_specs' = sbtspec_specs
|> map (apfst (map_aterms (AList.lookup op= mapt #> the)))
|> map (apfst dest_Var)
|> map (apfst (apsnd (map_atyps (AList.lookup op= mapT #> the))))
val _ = verbose_writer_concl_thms etts_output_type ctxt' (single thm')
in ((writer', rispec', sbtspec_specs'), thm') end;
(*Instantiate the schematic type variables of thm with the fixed type
variables prescribed by the RI specification.*)
fun etts_algorithm_subst_type ctxt' etts_output_type writer rispec thm =
let
val writer' = verbose_writer_prem
etts_output_type writer "substitution of type variables..."
val thm' =
Drule.instantiate_normalize
(
rispec
|> map (apsnd TFree)
|> map (apsnd (Thm.ctyp_of ctxt')),
[]
)
thm
val _ = verbose_writer_concl_thms etts_output_type ctxt' (single thm')
in (writer', thm') end;
(*Substitute the schematic term variables of thm by the corresponding
set-based terms: each specification pair is turned into a first-order
matcher and the resulting instantiations are folded over thm.*)
fun etts_algorithm_subst_var ctxt' etts_output_type writer sbtspec_specs thm =
let
val writer' = verbose_writer_prem
etts_output_type writer "substitution of variables..."
val thm' = sbtspec_specs
|> (Var #> (ctxt' |> Thm.cterm_of) |> apfst |> map)
|> map Thm.first_order_match
|> fold Drule.instantiate_normalize
(*fold ... yields thm -> thm; apply it to thm*)
|> curry op|> thm
val _ = verbose_writer_concl_thms etts_output_type ctxt' (single thm')
in (writer', thm') end;
(*Apply the "untransferred" attribute with the accumulated transfer rules
to thm, producing the set-based theorem and the updated generic
context.*)
fun etts_algorithm_untransfer ctxt' etts_output_type writer transfer_thms thm =
let
val writer' = verbose_writer_prem etts_output_type writer "untransfer..."
val (thm', context) = Thm.apply_attribute
(Transfer.untransferred_attribute transfer_thms)
thm
(Context.Proof ctxt')
val _ = verbose_writer_concl_thms etts_output_type ctxt' (single thm')
in (context, writer', thm') end;
(*Export the theorem from the (extended) context back into the original
context ctxt, transferred to the theory of the generic context.*)
fun etts_algorithm_export context ctxt etts_output_type writer thm =
let
val writer' = verbose_writer_prem etts_output_type writer "export..."
val thy' = Context.theory_of context
val ctxt' = Context.proof_of context
val ctxt'' = Proof_Context.transfer thy' ctxt
val thm' = singleton (Proof_Context.export ctxt' ctxt'') thm
val _ = verbose_writer_concl_thms etts_output_type ctxt'' (single thm')
in ((writer', thm'), ctxt'') end;
(*Cancel one local type definition per RI specification entry, logging
the resulting theorem in verbose mode.*)
fun etts_algorithm_ctd ctxt etts_output_type writer rispec thm =
  let
    val writer' =
      verbose_writer_prem etts_output_type writer "cancel type definition..."
    val thm' = cancel_type_definition_repeat (length rispec) thm
    val _ = verbose_writer_concl_thms etts_output_type ctxt [thm']
  in ((writer', thm'), ctxt) end;
in
(*Kernel of the relativization algorithm: run the individual steps
(fresh schematic type variables, unfolding of ud_with definitions,
unoverloading of types, substitution of type and term variables,
untransfer, export, cancellation of the local type definitions) in
sequence, threading the writer state and contexts through.*)
fun etts_kera
ctxt
ctxt'
etts_output_type
writer
rispec
sbtspec_var_specs
sbtspec_const_specs
transfer_thms
thm =
let
val ((writer', rispec, sbtspec_var_specs, sbtspec_const_specs), thm') =
etts_algorithm_fresh_stv
ctxt'
writer
rispec
sbtspec_var_specs
sbtspec_const_specs
thm
val ((writer'', sbtspec_specs), thm'') =
etts_algorithm_unfold_ud_with
ctxt'
etts_output_type
writer'
sbtspec_var_specs
sbtspec_const_specs
thm'
val ((writer''', rispec, sbtspec_specs'), thm''') =
etts_algorithm_unoverload_types
ctxt' etts_output_type writer'' rispec sbtspec_specs thm''
val (writer'''', thm'''') = etts_algorithm_subst_type
ctxt' etts_output_type writer''' rispec thm'''
val (writer''''', thm''''') = etts_algorithm_subst_var
ctxt' etts_output_type writer'''' sbtspec_specs' thm''''
val (context, writer'''''', thm'''''') = etts_algorithm_untransfer
ctxt' etts_output_type writer''''' transfer_thms thm'''''
val ((writer''''''', thm'''''''), ctxt'') = etts_algorithm_export
context ctxt etts_output_type writer'''''' thm''''''
val ((writer'''''''', thm''''''''), ctxt''') = etts_algorithm_ctd
ctxt'' etts_output_type writer''''''' rispec thm'''''''
in ((thm'''''''', writer''''''''), ctxt''') end;
end;
local
(*Simplify the theorem with the (optional) rewrite rules, logging
progress and the result in verbose mode.*)
fun etts_algorithm_simplification ctxt etts_output_type writer sbrr_opt thm =
  let
    val writer' =
      verbose_writer_prem etts_output_type writer "simplification..."
    val thm' = thm |> More_Simplifier.rewrite_simp_opt' ctxt sbrr_opt
    val _ = verbose_writer_concl_thms etts_output_type ctxt [thm']
  in (writer', thm') end;
local
(*Structural term equivalence up to variable names and matchable types:
constants must agree on the name, while Free/Var/Const atoms are
interchangeable if their types could match. Returns false when either
term contains no free variables at all (so the substitution below only
targets premises that mention fixed variables).*)
fun term_equiv_st (t, u) =
let
fun term_equiv_st ((Const (a, T)), (Const (b, U))) =
a = b andalso Type.could_match (T, U)
| term_equiv_st ((Free (_, T)), (Free (_, U))) = Type.could_match (T, U)
| term_equiv_st ((Var (_, T)), (Var (_, U))) = Type.could_match (T, U)
| term_equiv_st ((Free (_, T)), (Var (_, U))) = Type.could_match (T, U)
| term_equiv_st ((Var (_, T)), (Free (_, U))) = Type.could_match (T, U)
| term_equiv_st ((Const (_, T)), (Free (_, U))) = Type.could_match (T, U)
| term_equiv_st ((Free (_, T)), (Const (_, U))) = Type.could_match (T, U)
| term_equiv_st ((Const (_, T)), (Var (_, U))) = Type.could_match (T, U)
| term_equiv_st ((Var (_, T)), (Const (_, U))) = Type.could_match (T, U)
| term_equiv_st ((Bound n), (Bound m)) = (n = m)
| term_equiv_st ((Abs (_, T, t)), (Abs (_, U, u))) =
Type.could_match (T, U) andalso term_equiv_st (t, u)
| term_equiv_st ((tl $ tr), (ul $ ur)) =
term_equiv_st (tl, ul) andalso term_equiv_st (tr, ur)
| term_equiv_st ((Var (_, T)), (ul $ ur)) =
Type.could_match (T, type_of (ul $ ur))
| term_equiv_st ((Var (_, T)), (Abs (c, U, u))) =
Type.could_match (T, type_of (Abs (c, U, u)))
| term_equiv_st (_, _) = false;
in
if
(Term.add_frees t [] |> null |> not)
andalso (Term.add_frees u [] |> null |> not)
then term_equiv_st (t, u)
else false
end;
in
(*Repeatedly discharge premises of thm using the provided substitution
theorems: each premise is matched (via term_equiv_st) against the
statements of subst_thms; unmatched premises are filled with the trivial
theorem @{thm _}. Iterates until the number of premises stops
decreasing.*)
fun etts_algorithm_subst_prems ctxt etts_output_type writer subst_thms thm =
let
val writer' = verbose_writer_prem
etts_output_type writer "substitute known premises..."
val thm' =
let
val subst_thms = Attrib.eval_thms ctxt subst_thms
val subst_thmst = map Thm.full_prop_of subst_thms
fun option_thm thm_opt = case thm_opt of
SOME thm => thm
| _ => @{thm _}
fun mk_OFthms ts = ts
|>
(
(subst_thmst ~~ subst_thms)
|> AList.lookup term_equiv_st
|> map
)
|> map option_thm
fun subst_premises_repeat thm =
let
val premsts = thm |> Thm.full_prop_of |> Logic.strip_imp_prems
val out_thm = thm OF (mk_OFthms premsts)
in
if Thm.nprems_of thm = Thm.nprems_of out_thm
then out_thm
else subst_premises_repeat out_thm
end
in subst_premises_repeat thm end
val _ = verbose_writer_concl_thms etts_output_type ctxt (single thm')
in (writer', thm') end;
end;
(*Eliminate premises of thm using the optional method specification
mpespc_opt: the theorem is temporarily unvarified, reduced via
ETTS_Tactics.prem_red, and exported back. With NONE the theorem is
returned unchanged.*)
fun etts_algorithm_premred ctxt etts_output_type writer mpespc_opt thm =
let
val writer' =
verbose_writer_prem etts_output_type writer "elimination of premises..."
val thm' = case mpespc_opt of
SOME m_spec =>
let
val (out_thm, ctxt') = Thm.unvarify_local_thm ctxt thm
val out_thm = out_thm
|> ETTS_Tactics.prem_red ctxt' m_spec
|> singleton (Proof_Context.export ctxt' ctxt)
in out_thm end
| NONE => thm
val _ = verbose_writer_concl_thms etts_output_type ctxt (single thm')
in (writer', thm') end;
(*Apply the user-supplied attributes to the set-based theorem.*)
fun etts_algorithm_app_attrb ctxt etts_output_type writer attrbs thm =
let
val writer' = verbose_writer_prem etts_output_type writer
"application of the attributes for the set-based theorem..."
val (thm', ctxt') =
let
val attrbs =
map (Attrib.check_src ctxt #> Attrib.attribute ctxt) attrbs
in Thm.proof_attributes attrbs thm ctxt end
val _ = verbose_writer_concl_thms etts_output_type ctxt' (single thm')
in (writer', thm') end;
in
(*Post-processing of the relativized theorem: simplification,
substitution of known premises, elimination of premises, and
application of the attributes, in that order.*)
fun etts_algorithm_pp
ctxt etts_output_type writer sbrr_opt subst_thms mpespc_opt attrbs thm =
let
val (writer', thm') = etts_algorithm_simplification
ctxt etts_output_type writer sbrr_opt thm
val (writer'', thm'') = etts_algorithm_subst_prems
ctxt etts_output_type writer' subst_thms thm'
val (writer''', thm''') = etts_algorithm_premred
ctxt etts_output_type writer'' mpespc_opt thm''
val (writer'''', thm'''') = etts_algorithm_app_attrb
ctxt etts_output_type writer''' attrbs thm'''
in ((thm'''', writer''''), ctxt) end;
end;
local
(*Uniform prefix for error messages raised by the tts algorithm.*)
fun mk_msg_etts_algorithm msg = String.concat ["tts_algorithm: ", msg];
(*Validate the inputs of the relativization algorithm: the RI
specification must be nonempty (i.e. a tts context must be active), the
theorem must contain neither fixed type variables nor fixed variables,
and every type variable of the RI specification must occur schematically
in the theorem.*)
fun etts_algorithm_input rispec thm =
let
val msg_etts_context = mk_msg_etts_algorithm
"ERA can only be invoked from an appropriately parameterized tts context"
val msg_ftvs = mk_msg_etts_algorithm
"fixed type variables must not occur in the type-based theorems"
val msg_fvs = mk_msg_etts_algorithm
"fixed variables must not occur in the type-based theorems"
val msg_not_risstv_subset = mk_msg_etts_algorithm
"risstv must be a subset of the schematic type " ^
"variables that occur in the type-based theorems"
val _ = not (null rispec) orelse error msg_etts_context
val t = Thm.full_prop_of thm
val _ = t
|> (fn t => Term.add_tfrees t [])
|> null
orelse error msg_ftvs
val _ = t
|> (fn t => Term.add_frees t [])
|> null
orelse error msg_fvs
val stvs = t
|> (fn t => Term.add_tvars t [])
|> map #1
|> distinct op=
val risstv = map #1 rispec
val _ = subset op= (risstv, stvs) orelse error msg_not_risstv_subset
in () end;
in
(*Relativize a single theorem: validate the inputs, initialize the
relativization context, run the kernel of the algorithm, and
post-process the result. The writer index is advanced between the
phases.*)
fun etts_algorithm
ctxt
etts_output_type
writer
rispec
sbtspec
sbrr_opt
subst_thms
mpespc_opt
attrbs
thm =
let
val _ = etts_algorithm_input rispec thm
val
(
ctxt,
ctxt',
writer,
rispec,
sbtspec_var_specs,
sbtspec_const_specs,
transfer_thms
) = init_rlt_ctxt ctxt etts_output_type writer rispec sbtspec
val writer' = ETTS_Writer.increment_index 2 writer
val ((thm', writer'), ctxt'') = etts_kera
ctxt
ctxt'
etts_output_type
writer'
rispec
sbtspec_var_specs
sbtspec_const_specs
transfer_thms
thm
val writer'' = ETTS_Writer.increment_index 2 writer'
val ((thm'', writer'''), ctxt''') = etts_algorithm_pp
ctxt'' etts_output_type writer'' sbrr_opt subst_thms mpespc_opt attrbs thm'
in ((thm'', writer'''), ctxt''') end;
end;
(*Relativize a list of theorems with identical parameters: fold the
single-theorem algorithm over thms (right to left, so the output order
matches the input order), threading the writer state and context.*)
fun etts_fact
ctxt
etts_output_type
writer
rispec
sbtspec
sbrr_opt
subst_thms
mpespc_opt
attrbs
thms =
let
fun folder thm ((thms, writer), ctxt) =
etts_algorithm
ctxt
etts_output_type
writer
rispec
sbtspec
sbrr_opt
subst_thms
mpespc_opt
attrbs
thm
|>> apsnd (ETTS_Writer.increment_index 1)
(*prepend the new theorem to the accumulator*)
|>> apfst (curry (swap #> op::) thms)
in fold_rev folder thms (([], writer), ctxt) end;
end;File ‹ETTS_Active.ML›
signature ETTS_ACTIVE =
sig
(*configuration option controlling the indentation width of the printed
theorem statements*)
val etts_indent : int Config.T
(*kinds of theorem statements that can be printed*)
datatype etts_thm_type =
tts_lemma | tts_theorem | tts_corollary | tts_proposition
(*render a term as a complete theorem statement string*)
val theorem_string_of_term :
Proof.context ->
etts_thm_type ->
string ->
Token.src list ->
string ->
Token.src list ->
term ->
string
end;
structure ETTS_Active : ETTS_ACTIVE =
struct
(*configuration option "tts_indent" (default 2): indentation unit*)
val etts_indent = Attrib.setup_config_int \<^binding>‹tts_indent› (K 2)
fun etts_indent_val ctxt = Config.get ctxt etts_indent
(*n indentation units as a string of spaces*)
fun mk_etts_indent ctxt n = replicate (n*(etts_indent_val ctxt)) " "
|> String.concat
datatype etts_thm_type =
tts_lemma | tts_theorem | tts_corollary | tts_proposition;
(*NB: the patterns below are the constructors of etts_thm_type*)
fun string_of_etts_thm_type tts_lemma = "tts_lemma"
| string_of_etts_thm_type tts_theorem = "tts_theorem"
| string_of_etts_thm_type tts_corollary = "tts_corollary"
| string_of_etts_thm_type tts_proposition = "tts_proposition";
(*quote a string if it clashes with an outer keyword*)
fun maybe_quote ctxt =
ATP_Util.maybe_quote (Thy_Header.get_keywords' ctxt);
(*print a term in plain mode, with normalized spaces and quoting*)
fun print_term ctxt t = t
|> Print_Mode.setmp [] (Syntax.unparse_term ctxt #> Pretty.string_of)
|> Sledgehammer_Util.simplify_spaces
|> maybe_quote ctxt;
(*context with markup/type/sort/const printing disabled, for clean
plain-text output*)
fun mk_print_ctxt ctxt = fold
(fn opt => Config.put opt false)
[show_markup, Printer.show_type_emphasis, show_types, show_sorts, show_consts]
ctxt;
(*Render the "assumes" clause from a list of assumption strings; the
empty list yields the empty string. Each assumption ends with a
newline, and subsequent assumptions are joined by an indented "and".*)
fun mk_assumptions ctxt assmcs =
let fun mk_eoln c = c ^ "\n"
in
case assmcs of
[] => ""
| assmcs =>
mk_etts_indent ctxt 1 ^
"assumes " ^
String.concatWith ((mk_etts_indent ctxt 2) ^ "and ") (map mk_eoln assmcs)
end;
(*Render a list of attributes as "[a1, a2, ...]"; the empty list yields
the empty string.*)
fun mk_attr_string [] = ""
  | mk_attr_string attrs =
      let
        val attrsc = attrs
          |> map (String.concatWith " " o map Token.unparse)
          |> String.concatWith ", "
      in "[" ^ attrsc ^ "]" end
(*Render the "is" clause recording the name (and attributes) of the
input theorem.*)
fun mk_is ctxt thm_in_name attrs_in = String.concat
  [mk_etts_indent ctxt 2, "is ", thm_in_name, mk_attr_string attrs_in]
(*Render the "shows" clause from a list of conclusion strings, followed
by the "is" clause; the empty list yields the empty string.*)
fun mk_shows ctxt thm_in_name attrs_in conclcs = case conclcs of
[] => ""
| conclcs =>
mk_etts_indent ctxt 1 ^
"shows " ^
String.concatWith "\n" conclcs ^ "\n" ^
mk_is ctxt thm_in_name attrs_in;
(*Render the statement header: keyword, name, attributes and colon.*)
fun mk_preamble etts_thm_type thm_out_name attrs_out =
(string_of_etts_thm_type etts_thm_type) ^ " " ^
thm_out_name ^
mk_attr_string attrs_out ^ ":\n"
(*Render the term t as a complete theorem statement: preamble, assumes
clause (from the premises) and shows clause (from the conjuncts of the
conclusion), terminated by ".". The term is unchecked and annotated
with types before printing.*)
fun theorem_string_of_term
ctxt etts_thm_type thm_out_name attrs_out thm_in_name attrs_in t =
let
val ctxt = mk_print_ctxt ctxt
val t = t
|> singleton (Syntax.uncheck_terms ctxt)
|> Sledgehammer_Isar_Annotate.annotate_types_in_term ctxt
val assmsc = t
|> Logic.strip_imp_prems
|> map (print_term ctxt)
|> mk_assumptions ctxt
val conclc = t
|> Logic.strip_imp_concl
|> Logic.dest_conjunctions
|> map (print_term ctxt)
|> mk_shows ctxt thm_in_name attrs_in
val preamblec = mk_preamble etts_thm_type thm_out_name attrs_out
val done = ".\n\n"
val thmc = String.concatWith "" [preamblec, assmsc, conclc, done]
in thmc end;
end;File ‹ETTS_Lemma.ML›
signature ETTS_LEMMA =
sig
(*register a tts_lemma-style goal command under the given keyword with
the given description*)
val tts_lemma : Outer_Syntax.command_keyword -> string -> unit
end
structure ETTS_Lemma : ETTS_LEMMA =
struct
open ETTS_Context;
open ETTS_Algorithm;
(*How a user-supplied theorem relates to a tts goal: "given" (inserted as
a premise) or "is" (expected to close the goal exactly).*)
datatype tts_addendum = tts_given | tts_is;
fun string_to_tts_addendum "given" = tts_given
| string_to_tts_addendum "is" = tts_is
| string_to_tts_addendum _ = error "string_to_tts_addendum: invalid input.";
(*Tactic that closes goal i by thm, provided thm matches the (single)
premise of the selected goal up to instantiation and eta-contraction;
raises an error otherwise.*)
fun eq_thm_tac ctxt thm i =
let
fun eq_thm_impl ctxt thm goal_thm =
let
val error_msg = "eq_thm_tac failed on " ^ (Thm.string_of_thm ctxt thm)
val goal_prem_ct = goal_thm
|> Thm.cprems_of
|> the_single
val thm_ct = Thm.cprop_of thm
val thm = thm
|> Drule.instantiate_normalize (Thm.match (thm_ct, goal_prem_ct))
|> Drule.eta_contraction_rule
handle Pattern.MATCH => error error_msg
(*double-check that the instantiated theorem coincides with the
goal premise before discharging it*)
val _ = ((Thm.full_prop_of thm) aconv (Thm.term_of goal_prem_ct))
orelse error error_msg
in Thm.implies_elim goal_thm thm end
in SELECT_GOAL (PRIMITIVE (eq_thm_impl ctxt thm)) i end;
(*Tactic for a single tts goal: a tts_given theorem is inserted as a
premise, whereas a tts_is theorem must discharge the goal exactly.*)
fun tts_lemma_tac ctxt (addendum, thm) =
  case addendum of
    tts_given => Method.insert_tac ctxt (single thm)
  | tts_is => eq_thm_tac ctxt thm;
(*Apply, to every goal n, the tactic determined by the n-th entry of
tts_thm_spec (goals are numbered from 1; the AList lookup fails if there
are more goals than specifications).*)
fun tts_lemma_map_tac ctxt tts_thm_spec =
let
val tts_addendum_map =
AList.lookup op= (1 upto (length tts_thm_spec) ~~ tts_thm_spec) #> the
fun tac_map n = tts_lemma_tac ctxt (tts_addendum_map n) n
in ALLGOALS tac_map end;
(*Package tts_lemma_map_tac as a context method.*)
fun tts_lemma_map_method tts_thm_spec = CONTEXT_METHOD
  (
    fn _ => fn (ctxt, st) =>
      Context_Tactic.TACTIC_CONTEXT ctxt (tts_lemma_map_tac ctxt tts_thm_spec st)
  );
(*Refine the proof state by a single application of the tts goal
method.*)
fun refine_tts_lemma_map thmss =
  Proof.refine_singleton (Method.Basic (fn _ => tts_lemma_map_method thmss));
(*Relativize thms using the data of the active tts context (in the
default output mode), discarding the final writer state and context.*)
fun relativization ctxt thms =
let
val
{
mpespc_opt = mpespc_opt,
rispec = rispec,
sbtspec = sbtspec,
sbrr_opt = sbrr_opt,
subst_thms = subst_thms,
attrbs = attrbs
} = get_tts_ctxt_data ctxt
val writer = ETTS_Writer.initialize 4
val ((thms, _), _) = ETTS_Algorithm.etts_fact
ctxt
default
writer
rispec
sbtspec
sbrr_opt
subst_thms
mpespc_opt
attrbs
thms
in thms end;
(*Context method that inserts thms into every goal and rotates the
premises of each goal by j.*)
fun insert_rotate j thms =
CONTEXT_METHOD
(
fn _ => fn (ctxt, st) => st
|> ALLGOALS (fn i => Method.insert_tac ctxt thms i THEN rotate_tac j i)
|> Context_Tactic.TACTIC_CONTEXT ctxt
);
(*Refine the proof state by inserting ths into every goal and rotating
the premises by j.*)
fun refine_insert_rotate j ths =
  Proof.refine_singleton (Method.Basic (fn _ => insert_rotate j ths));
(*Set up the tts goal: relativize the user-supplied theorems, flatten the
(addendum, theorems) specifications into one list, insert the local
premises of the outer context into the goals (rotated to the back), and
refine with the tts goal method.*)
fun mk_tts_goal tts_thms_specs outer_ctxt st =
let
val tts_thms_specs = tts_thms_specs
|> map
(
relativization outer_ctxt
|> apsnd
#>
(
fn (tts_addendum, thms) =>
(replicate (length thms) tts_addendum, thms)
)
#> op~~
)
|> flat
|> map (apfst string_to_tts_addendum)
val ctxt = Proof.context_of st
val assms = Assumption.local_prems_of ctxt outer_ctxt
(*permutations restoring the original order of the fixed type
variables of each assumption; recovered from the stored hypotheses*)
val all_ftv_permutes = assms
|> map
(
Thm.hyps_of
#> the_single
#> Logic.get_forall_ftv_permute
#> #2
#> #2
)
val assms = map2 (Thm.forall_intr_var_order ctxt) all_ftv_permutes assms
val st = refine_insert_rotate (~(length assms)) assms st
in refine_tts_lemma_map tts_thms_specs st end;
local
(*keywords that may introduce the body of a long goal statement*)
val long_keyword =
Parse_Spec.includes >> K "" || Parse_Spec.long_statement_keyword;
(*a tts addendum is either "given thm" or "is thm"*)
val parse_tts_addendum =
\<^keyword>‹given› -- Parse.thm || \<^keyword>‹is› -- Parse.thm;
val parse_obtains =
Parse.$$$ "obtains" |-- Parse.!!! (Parse_Spec.obtains -- parse_tts_addendum);
fun process_obtains args =
(args |> #1 |> Element.Obtains, args |> #2 |> single);
(*each conclusion of a "shows" clause carries its own tts addendum*)
val parse_shows =
let
val statement = Parse.and_list1
(
Parse_Spec.opt_thm_name ":" --
Scan.repeat1 Parse.propp --
parse_tts_addendum
);
in Parse.$$$ "shows" |-- Parse.!!! statement end;
fun process_shows args = (args |> map #1 |> Element.Shows, map #2 args);
(*long statements: optional binding, includes, context elements and either
an obtains or a shows clause*)
val parse_long_statement =
Scan.optional
(Parse_Spec.opt_thm_name ":" --| Scan.ahead long_keyword)
Binding.empty_atts --
Scan.optional Parse_Spec.includes [] --
(
Scan.repeat Parse_Spec.context_element --
(parse_obtains >> process_obtains || parse_shows >> process_shows)
);
fun process_long_statement
(((binding, includes), (elems, (concl, tts_thms_specs)))) =
(true, binding, includes, elems, concl, tts_thms_specs);
val long_statement = parse_long_statement >> process_long_statement;
(*short statements: shows/if/for with a single tts addendum*)
val parse_short_statement =
Parse_Spec.statement --
Parse_Spec.if_statement --
Parse.for_fixes --
parse_tts_addendum;
fun process_short_statement (((shows, assumes), fixes), tts_thms_specs) =
(
false,
Binding.empty_atts,
[],
[Element.Fixes fixes, Element.Assumes assumes],
Element.Shows shows,
single tts_thms_specs
);
val short_statement = parse_short_statement >> process_short_statement;
in
(*parser for the goal statement of the tts lemma commands*)
val parse_tts_lemma = long_statement || short_statement;
end;
(*Evaluate the theorems referenced in the tts addenda, open the goal of a
tts lemma via Specification.theorem_cmd and install the tts goals.*)
fun process_tts_lemma
(long, binding, includes, elems, concl, tts_thms_specs) b lthy =
let
val tts_thms_specs =
map (single #> Attrib.eval_thms lthy |> apsnd) tts_thms_specs
in
lthy
|> Specification.theorem_cmd
long Thm.theoremK NONE (K I) binding includes elems concl b
|> mk_tts_goal tts_thms_specs lthy
end;
(*Register an outer syntax command for stating tts lemmas.*)
fun tts_lemma spec descr = Outer_Syntax.local_theory_to_proof'
spec ("state " ^ descr) (parse_tts_lemma >> process_tts_lemma);
end;File ‹ETTS_Lemmas.ML›
(*Interface of the tts_lemmas command: automated relativization of the
facts listed in the theorem specifications.*)
signature TTS_LEMMAS =
sig
val tts_lemmas :
Proof.context ->
ETTS_Algorithm.etts_output_type ->
((binding * Token.src list) * (thm list * (string * Token.src list))) list ->
Proof.context * int list
end;
structure TTS_Lemmas : TTS_LEMMAS =
struct
open ETTS_Algorithm;
open ETTS_Context;
open ETTS_Active;
(*Register out_thms under the binding/attributes ab in the local theory and
print them; the returned string is empty because theorem text is only
produced by the active output mode (see active_output).*)
fun register_output ctxt ab out_thms =
let
val ((thmc, out_thms), lthy) =
let
val facts' = (ab ||> map (Attrib.check_src ctxt), single (out_thms, []))
|> single
|> Attrib.partial_evaluation ctxt
in ctxt |> Local_Theory.notes facts' |>> the_single end
val _ = CTR_Utilities.thm_printer lthy true thmc out_thms
in (lthy, "") end
local
(*Compute the number of leading premises shared (with respect to eq) by all
terms in ts, together with the truncated premise lists.  The premise lists
are first cut to the length of the shortest one; the first list serves as
the template against which the remaining lists are compared.*)
fun num_rep_prem eq ts =
let
val premss = map Logic.strip_imp_prems ts
val min_length = min_list (map length premss)
val premss = map (take min_length) premss
val template_prems = hd premss
(*for each non-template list: length of the longest common prefix*)
val num_eq_prems = premss
|> tl
|>
map
(
compare_each eq template_prems
#> take_prefix (curry op= true)
#> length
)
val num_rep_prems =
if length premss = 1
then length template_prems
else min_list num_eq_prems
in (num_rep_prems, premss) end;
(*Discharge the assumptions assm_thms against thm, performing one
implies_elim per assumption, left to right.*)
fun elim_assms assm_thms thm =
  fold (fn assm => fn acc => Thm.implies_elim acc assm) assm_thms thm;
(*Combine the fact thms into a single theorem: the premises shared by all
elements of the fact become local assumptions, suitably quantified over
their schematic variables, which are discharged against each element; the
results are joined by balanced conjunction and exported back into ctxt.*)
fun thm_of_fact ctxt thms =
let
val ts = map Thm.full_prop_of thms
val (num_rep_prems, _) = num_rep_prem (op aconv) ts
(*the repeated premises, taken from the first theorem of the fact*)
val rep_prems = thms
|> hd
|> Thm.full_prop_of
|> Logic.strip_imp_prems
|> take num_rep_prems
|> map (Thm.cterm_of ctxt);
(*per repeated premise: bookkeeping of its frees after stripping the
leading universal quantifiers (pair-shuffling via reroute_*/ap*)*)
val all_ftv_rels =
let val subtract = swap #> uncurry (subtract op=)
in
rep_prems
|> map
(
Thm.term_of
#> Logic.forall_elim_all
#> apfst (fn t => Term.add_frees t [])
#> apsnd dup
#> reroute_sp_ps
#> apfst (apfst dup)
#> apfst reroute_ps_sp
#> apfst (apsnd subtract)
#> apfst subtract
)
end
(*index of each free variable within its premise's list of frees*)
val index_of_ftvs = all_ftv_rels
|> map
(
#1
#> map_index I
#> map swap
#> AList.lookup op=
#> curry (swap #> op#>) the
)
val all_indicess = (map #2 all_ftv_rels) ~~ index_of_ftvs
|> map (fn (x, f) => map f x)
val (assms, ctxt') = Assumption.add_assumes rep_prems ctxt
val stvss =
map
(
Thm.full_prop_of
#> (fn t => Term.add_vars t [])
#> map Var
#> map (Thm.cterm_of ctxt)
)
assms
(*re-quantify each assumption over its schematics in the stored order*)
val stvss = stvss ~~ all_indicess
|> map (fn (stvs, indices) => map (nth stvs) indices)
val assms = map2 forall_intr_list stvss assms
val thm = thms
|> map (elim_assms assms)
|> Conjunction.intr_balanced
|> singleton (Proof_Context.goal_export ctxt' ctxt)
in thm end;
in
(*Produce the text of a tts_lemma statement for the active output mode: the
fact is unvarified, collapsed into a single theorem via thm_of_fact and
pretty-printed as a complete tts_lemma command.*)
fun active_output ctxt thm_out_name attrs_out thm_in_name attrs_in thms =
let
val (thms, ctxt') = Thm.unvarify_local_fact ctxt thms
val thmc = thms
|> thm_of_fact ctxt'
|> Thm.full_prop_of
|> theorem_string_of_term
ctxt tts_lemma thm_out_name attrs_out thm_in_name attrs_in
in (ctxt, thmc) end;
end;
(*Relativize each fact of thm_specs with the ETTS algorithm and either
register the result in the local theory (default/verbose output modes) or
accumulate the corresponding tts_lemma text (active output mode); in the
latter case the accumulated text is offered via sendback markup.*)
fun tts_lemmas ctxt tts_output_type thm_specs =
let
val
{
mpespc_opt = mpespc_opt,
rispec = rispec,
sbtspec = sbtspec,
sbrr_opt = sbrr_opt,
subst_thms = subst_thms,
attrbs = attrbs
} = get_tts_ctxt_data ctxt
val writer = ETTS_Writer.initialize 4
(*process one theorem specification, threading context/text/writer*)
fun folder
((b, attrs_out), (thms, (thm_in_name, attrs_in))) (ctxt, thmcs, writer) =
let
val ((out_thms, writer), ctxt) =
ETTS_Algorithm.etts_fact
ctxt
tts_output_type
writer
rispec
sbtspec
sbrr_opt
subst_thms
mpespc_opt
attrbs
thms
val writer = ETTS_Writer.increment_index 0 writer
val (lthy, thmc) =
if is_default tts_output_type orelse is_verbose tts_output_type
then register_output ctxt (b, attrs_out) out_thms
else
active_output
ctxt
(Name_Space.base_name b)
attrs_out
thm_in_name
attrs_in
out_thms
val thmcs = thmcs ^ thmc
in (lthy, thmcs, writer) end
val (ctxt'', thmcs, writer) = fold folder thm_specs (ctxt, "", writer)
(*active mode: offer the generated tts_lemma commands as sendback text*)
val _ =
if is_active tts_output_type
then thmcs |> Active.sendback_markup_command |> writeln
else ()
in (ctxt'', writer) end;
local
(*"!" and "?" select the verbose and active output modes, respectively*)
val parse_output_mode = Scan.optional (\<^keyword>‹!› || \<^keyword>‹?›) "";
val parse_facts = \<^keyword>‹in› |-- Parse_Spec.name_facts;
in
(*parser for the input of the tts_lemmas command*)
val parse_tts_lemmas = parse_output_mode -- parse_facts;
end;
(*Prefix an error message with the name of the issuing command.*)
fun mk_msg_tts_lemmas msg = String.concat ["tts_lemmas: ", msg];
(*Input validation for tts_lemmas: every entry of the raw theorem
specification must reference exactly one fact; raises an error otherwise.*)
fun thm_specs_raw_input thm_specs_raw =
let
val msg_multiple_facts =
mk_msg_tts_lemmas "only one fact per entry is allowed"
val _ = thm_specs_raw |> map (#2 #> length) |> List.all (fn n => n = 1)
orelse error msg_multiple_facts
in () end;
local
(*Convert the raw theorem specifications of the tts_lemmas command into the
form consumed by tts_lemmas: evaluate the fact references and choose an
output binding — the explicit binding if present, otherwise (for singleton
facts) the theorem's own binding, otherwise the fact name.  The point-free
combinators (reroute_*/ap*/dup/swap) only shuffle components of nested
pairs.  NOTE(review): the resulting shape
(binding * attributes) * (thms * (fact name * attributes)) is inferred
from the use sites in tts_lemmas — confirm.*)
fun mk_thm_specs ctxt thm_specs_raw =
let
(*base name of a (possibly qualified) fact name, as a binding*)
fun binding_last c =
let val binding_last_h = Long_Name.base_name #> Binding.qualified_name
in if size c = 0 then Binding.empty else binding_last_h c end
(*priority: explicit binding, then unique theorem binding, then fact*)
fun binding_of_binding_spec (b, (factb, thmbs)) =
if Binding.is_empty b
then
if length thmbs = 1
then
if Binding.is_empty (the_single thmbs)
then factb
else the_single thmbs
else factb
else b
val thm_specs = thm_specs_raw
|> map (apsnd the_single)
|>
(
Facts.ref_name
#> binding_last
|> apdupl
|> apfst
#> reroute_ps_sp
|> apsnd
|> map
)
|> map reroute_sp_ps
|> map (reroute_ps_sp #> (swap |> apsnd) #> reroute_sp_ps |> apfst)
|> map (apsnd dup)
|>
(
single
#> (ctxt |> Attrib.eval_thms)
#> (Thm.derivation_name #> binding_last |> map |> apdupl)
|> apfst
|> apsnd
|> map
)
|> map (reroute_sp_ps #> apfst reroute_sp_ps #> reroute_ps_sp)
|>
(
reroute_ps_sp
#> (swap |> apsnd)
#> reroute_sp_ps
#> (reroute_ps_sp #> binding_of_binding_spec |> apfst)
|> apfst
|> map
)
|> map (Facts.ref_name |> apfst |> apsnd |> apsnd)
in thm_specs end;
in
(*Process the parsed input of the tts_lemmas command: decode the output
mode, validate and elaborate the theorem specifications and run the
relativization, returning the updated context.*)
fun process_tts_lemmas args ctxt =
let
val tts_output_type = args |> #1 |> etts_output_type_of_string
val thm_specs_raw = #2 args
val _ = thm_specs_raw_input thm_specs_raw
val thm_specs = mk_thm_specs ctxt thm_specs_raw
in thm_specs |> tts_lemmas ctxt tts_output_type |> #1 end;
end;
(*Outer syntax: registration of the tts_lemmas command.*)
val _ = Outer_Syntax.local_theory
\<^command_keyword>‹tts_lemmas›
"automated relativization of facts"
(parse_tts_lemmas >> process_tts_lemmas);
end;Theory ETTS_Auxiliary
section‹Auxiliary functionality and helpful lemmas for Types-To-Sets›
theory ETTS_Auxiliary
imports ETTS
begin
subsection‹Further transfer rules›
(*the domain of equality is the whole universe*)
lemma Domainp_eq[transfer_domain_rule, transfer_rule]:
"Domainp (=) = (λx. x ∈ UNIV)"
by auto
(*the domain of a product relation is the product of the domains*)
lemma Domainp_rel_prod[transfer_domain_rule, transfer_rule]:
assumes "Domainp cr⇩1 = (λx. x ∈ 𝔘⇩1)" and "Domainp cr⇩2 = (λx. x ∈ 𝔘⇩2)"
shows "Domainp (rel_prod cr⇩1 cr⇩2) = (λx. x ∈ 𝔘⇩1 × 𝔘⇩2)"
using assms by (simp add: ctr_simps_pred_prod_eq_cart prod.Domainp_rel)
subsection‹Constant function›
definition const_fun :: "['b, 'a] ⇒ 'b" where "const_fun c = (λx. c)"
(*a function that is constant on the domain of A transfers to const_fun*)
lemma const_fun_transfer[transfer_rule]:
includes lifting_syntax
assumes "Domainp A = (λx. x ∈ 𝔘A)" and "∀x ∈ 𝔘A. f x = c" and "B c c'"
shows "(A ===> B) f (const_fun c')"
proof(intro rel_funI)
fix x y assume "A x y"
then have "x ∈ 𝔘A" by (meson Domainp.DomainI assms(1))
then have "f x = c" by (rule assms(2)[rule_format])
show "B (f x) (const_fun c' y)"
unfolding ‹f x = c› const_fun_def by (rule assms(3))
qed
subsection‹Transfer rules suitable for instantiation›
(*restating Domainp A as set membership determines the set uniquely*)
lemma Domainp_eq_Collect: "Domainp A = (λx. x ∈ 𝔘) = (𝔘 = Collect (Domainp A))"
by (metis mem_Collect_eq pred_equals_eq)
context
includes lifting_syntax
begin
(*any subset of the domain of A is related by rel_set A to some set*)
lemma tts_rel_set_transfer:
fixes A :: "'al ⇒ 'ar ⇒ bool"
and B :: "'bl ⇒ 'br ⇒ bool"
assumes "S ⊆ Collect (Domainp A)"
shows "∃S'. rel_set A S S'"
by (metis assms(1)[folded Ball_Collect] DomainpE Domainp_set)
(*a function mapping the domain of A into the domain of B is related by
A ===> B to some counterpart on the representation types*)
lemma tts_AB_transfer:
fixes f :: "'al ⇒ 'bl"
and A :: "'al ⇒ 'ar ⇒ bool"
and B :: "'bl ⇒ 'br ⇒ bool"
assumes closed: "f ` 𝔘A ⊆ 𝔘B"
assumes
"Domainp A = (λx. x ∈ 𝔘A)" "bi_unique A" "right_total A"
"Domainp B = (λx. x ∈ 𝔘B)" "bi_unique B" "right_total B"
shows "∃rcdt. (A ===> B) f rcdt"
proof-
from closed have closed': "x ∈ 𝔘A ⟹ f x ∈ 𝔘B" for x by auto
(*obtain type definitions for the domains of A and B*)
from assms(3,4) obtain Rep_a :: "'ar ⇒ 'al" and Abs_a :: "'al ⇒ 'ar"
where td_a: "type_definition Rep_a Abs_a (Collect (Domainp A))"
and A_Rep: "A b b' = (b = Rep_a b')" for b b'
by (blast dest: ex_type_definition)
from assms(6,7) obtain Rep_b :: "'br ⇒ 'bl" and Abs_b :: "'bl ⇒ 'br"
where td_b: "type_definition Rep_b Abs_b (Collect (Domainp B))"
and B_Rep: "B b b' = (b = Rep_b b')" for b b'
by (blast dest: ex_type_definition)
(*the witness: f conjugated by the representation morphisms*)
define f' where f': "f' = (Rep_a ---> Abs_b) f"
have f_closed: "f (Rep_a a) ∈ 𝔘B" for a
using td_a td_b by (intro closed') (simp add: assms(2,5))+
have rep_abs_b: "y ∈ 𝔘B ⟹ y = Rep_b (Abs_b y)" for y
using td_b unfolding type_definition_def by (simp add: assms(5))
have "(A ===> B) f f'"
unfolding f' map_fun_apply
by
(
intro rel_funI,
unfold A_Rep B_Rep,
elim ssubst,
intro rep_abs_b f_closed
)
then show ?thesis by auto
qed
(*conversely, any function on the representation types has a counterpart
on the original types related to it by A ===> B*)
lemma tts_AB_transfer':
fixes f' :: "'ar ⇒ 'br"
and A :: "'al ⇒ 'ar ⇒ bool"
and B :: "'bl ⇒ 'br ⇒ bool"
assumes
"Domainp A = (λx. x ∈ 𝔘A)" "bi_unique A" "right_total A"
"Domainp B = (λx. x ∈ 𝔘B)" "bi_unique B" "right_total B"
shows "∃f. (A ===> B) f f'"
proof-
from assms(2,3) obtain Rep_a :: "'ar ⇒ 'al" and Abs_a :: "'al ⇒ 'ar"
where td_a: "type_definition Rep_a Abs_a (Collect (Domainp A))"
and A_Rep: "A b b' = (b = Rep_a b')" for b b'
by (blast dest: ex_type_definition)
from assms(5,6) obtain Rep_b :: "'br ⇒ 'bl" and Abs_b :: "'bl ⇒ 'br"
where td_b: "type_definition Rep_b Abs_b (Collect (Domainp B))"
and B_Rep: "B b b' = (b = Rep_b b')" for b b'
by (blast dest: ex_type_definition)
(*the witness: f' conjugated by the abstraction morphisms*)
define f where f: "f = (Abs_a ---> Rep_b) f'"
have abs_rep_a: "y = Abs_a (Rep_a y)" for y
using td_a unfolding type_definition_def by simp
have "(A ===> B) f f'"
unfolding f map_fun_apply
by
(
intro rel_funI,
unfold A_Rep B_Rep,
elim ssubst,
fold abs_rep_a,
intro refl
)
then show ?thesis by auto
qed
(*binary analogue of tts_AB_transfer: a binary operation closed on the
domains transfers to some counterpart on the representation types*)
lemma tts_AB_C_transfer:
fixes f :: "'al ⇒ 'bl ⇒ 'cl"
and A :: "'al ⇒ 'ar ⇒ bool"
and B :: "'bl ⇒ 'br ⇒ bool"
and C :: "'cl ⇒ 'cr ⇒ bool"
assumes closed: "⋀a b. ⟦ a ∈ 𝔘A; b ∈ 𝔘B ⟧ ⟹ f a b ∈ 𝔘C"
assumes
"Domainp A = (λx. x ∈ 𝔘A)" "bi_unique A" "right_total A"
"Domainp B = (λx. x ∈ 𝔘B)" "bi_unique B" "right_total B"
"Domainp C = (λx. x ∈ 𝔘C)" "bi_unique C" "right_total C"
shows "∃rcdt. (A ===> B ===> C) f rcdt"
proof-
from assms(3,4) obtain Rep_a :: "'ar ⇒ 'al" and Abs_a :: "'al ⇒ 'ar"
where td_a: "type_definition Rep_a Abs_a (Collect (Domainp A))"
and A_Rep: "A b b' = (b = Rep_a b')" for b b'
by (blast dest: ex_type_definition)
from assms(6,7) obtain Rep_b :: "'br ⇒ 'bl" and Abs_b :: "'bl ⇒ 'br"
where td_b: "type_definition Rep_b Abs_b (Collect (Domainp B))"
and B_Rep: "B b b' = (b = Rep_b b')" for b b'
by (blast dest: ex_type_definition)
from assms(9,10) obtain Rep_c :: "'cr ⇒ 'cl" and Abs_c :: "'cl ⇒ 'cr"
where td_c: "type_definition Rep_c Abs_c (Collect (Domainp C))"
and C_Rep: "C b b' = (b = Rep_c b')" for b b'
by (blast dest: ex_type_definition)
(*the witness: f conjugated by the representation morphisms*)
define f' where f': "f' = (Rep_a ---> Rep_b ---> Abs_c) f"
from td_a td_b td_c have f_closed: "f (Rep_a a) (Rep_b b) ∈ 𝔘C" for a b
by (intro closed; simp_all add: assms(2,5,8))+
have rep_abs_c: "y ∈ 𝔘C ⟹ y = Rep_c (Abs_c y)" for y
using td_c unfolding type_definition_def by (simp add: assms(8))
have "(A ===> B ===> C) f f'"
unfolding f' map_fun_apply
by
(
intro rel_funI,
unfold A_Rep B_Rep C_Rep,
elim ssubst,
intro rep_abs_c f_closed
)
then show ?thesis by auto
qed
(*specializations of tts_AB_C_transfer to homogeneous operations*)
lemma tts_AA_A_transfer:
fixes A :: "'a ⇒ 'b ⇒ bool" and f :: "'a ⇒ 'a ⇒ 'a"
assumes closed: "⋀a b. ⟦ a ∈ 𝔘; b ∈ 𝔘 ⟧ ⟹ f a b ∈ 𝔘"
assumes "Domainp A = (λx. x ∈ 𝔘)" "bi_unique A" "right_total A"
shows "∃rcdt. (A ===> A ===> A) f rcdt"
using closed
by (rule tts_AB_C_transfer[OF _ assms(2-4) assms(2-4) assms(2-4)])
(*binary predicates are always closed, since the result lives in UNIV*)
lemma tts_AA_eq_transfer:
fixes A :: "'a ⇒ 'b ⇒ bool" and f :: "'a ⇒ 'a ⇒ bool"
assumes "Domainp A = (λx. x ∈ 𝔘)" "bi_unique A" "right_total A"
shows "∃rcdt. (A ===> A ===> (=)) f rcdt"
proof-
have closed: "f x y ∈ UNIV" for x y by auto
have dom_eq: "Domainp (=) = (λx. x ∈ UNIV)" by auto
from tts_AB_C_transfer[
OF _ assms(1-3) assms(1-3) dom_eq bi_unique_eq right_total_eq
]
show ?thesis by auto
qed
(*the domain of A ===> B consists of the functions mapping 𝔘A into 𝔘B*)
lemma Dom_fun_eq_set:
assumes
"Domainp A = (λx. x ∈ 𝔘A)" "bi_unique A" "right_total A"
"Domainp B = (λx. x ∈ 𝔘B)" "bi_unique B" "right_total B"
shows "{f. f ` 𝔘A ⊆ 𝔘B} = Collect (Domainp (A ===> B))"
proof(standard; (standard, intro CollectI, elim CollectE DomainpE))
fix x assume "x ` 𝔘A ⊆ 𝔘B"
from tts_AB_transfer[OF this assms] obtain x' where xx':
"(A ===> B) x x'" by clarsimp
show "Domainp (A ===> B) x" by standard (rule xx')
next
fix x b assume "(A ===> B) x b"
then show "x ` 𝔘A ⊆ 𝔘B"
unfolding
rel_fun_def
Domainp_eq_Collect[THEN iffD1, OF assms(1)]
Domainp_eq_Collect[THEN iffD1, OF assms(4)]
by auto
qed
(*pointwise reformulation of Dom_fun_eq_set*)
lemma Dom_AB_eq_pred:
assumes
"Domainp A = (λx. x ∈ 𝔘A)" "bi_unique A" "right_total A"
"Domainp B = (λx. x ∈ 𝔘B)" "bi_unique B" "right_total B"
shows "(Domainp (A ===> B) f) = (f ` 𝔘A ⊆ 𝔘B)"
using Dom_fun_eq_set[OF assms] by blast
end
endTheory Manual_Prerequisites
section‹Prerequisites›
theory Manual_Prerequisites
imports
"../ETTS"
"HOL-Library.LaTeXsugar"
begin
(*document antiquotations reused from the Isabelle documentation setup*)
ML_file ‹~~/src/Doc/antiquote_setup.ML›
(*notation for the presentation of transfer relations in the manual*)
notation rel_fun (infixr "===>" 55)
notation map_fun (infixr "--->" 55)
type_notation bool (‹𝔹›)
endFile ‹~~/src/Doc/antiquote_setup.ML›
(*Document antiquotation setup (from the Isabelle distribution).*)
structure Antiquote_Setup: sig end =
struct
(*apply f to every symbol of a string*)
fun translate f = Symbol.explode #> map f #> implode;
(*escape characters that are special in LaTeX output*)
val clean_string = translate
(fn "_" => "\\_"
| "#" => "\\#"
| "$" => "\\$"
| "%" => "\\%"
| "<" => "$<$"
| ">" => "$>$"
| "{" => "\\{"
| "|" => "$\\mid$"
| "}" => "\\}"
| "‐" => "-"
| c => c);
(*normalize names for use in LaTeX hyperlink/index labels*)
fun clean_name "…" = "dots"
| clean_name ".." = "ddot"
| clean_name "." = "dot"
| clean_name "_" = "underscore"
| clean_name "{" = "braceleft"
| clean_name "}" = "braceright"
| clean_name s = s |> translate (fn "_" => "-" | "‐" => "-" | c => c);
local
(*builders for the ML source that checks each antiquotation input kind*)
fun ml_val (toks1, []) = ML_Lex.read "fn _ => (" @ toks1 @ ML_Lex.read ");"
| ml_val (toks1, toks2) =
ML_Lex.read "fn _ => (" @ toks1 @ ML_Lex.read " : " @ toks2 @ ML_Lex.read ");";
fun ml_op (toks1, []) = ML_Lex.read "fn _ => (op " @ toks1 @ ML_Lex.read ");"
| ml_op (toks1, toks2) =
ML_Lex.read "fn _ => (op " @ toks1 @ ML_Lex.read " : " @ toks2 @ ML_Lex.read ");";
fun ml_type (toks1, []) = ML_Lex.read "val _ = NONE : (" @ toks1 @ ML_Lex.read ") option;"
| ml_type (toks1, toks2) =
ML_Lex.read "val _ = [NONE : (" @ toks1 @ ML_Lex.read ") option, NONE : (" @
toks2 @ ML_Lex.read ") option];";
fun ml_exception (toks1, []) = ML_Lex.read "fn _ => (" @ toks1 @ ML_Lex.read " : exn);"
| ml_exception (toks1, toks2) =
ML_Lex.read "fn _ => (" @ toks1 @ ML_Lex.read " : " @ toks2 @ ML_Lex.read " -> exn);";
fun ml_structure (toks, _) =
ML_Lex.read "functor XXX() = struct structure XX = " @ toks @ ML_Lex.read " end;";
fun ml_functor (Antiquote.Text tok :: _, _) =
ML_Lex.read "ML_Env.check_functor " @
ML_Lex.read (ML_Syntax.print_string (ML_Lex.content_of tok))
| ml_functor _ = raise Fail "Bad ML functor specification";
val is_name =
ML_Lex.kind_of #> (fn kind => kind = ML_Lex.Ident orelse kind = ML_Lex.Long_Ident);
(*extract the single ML identifier contained in txt*)
fun ml_name txt =
(case filter is_name (ML_Lex.tokenize txt) of
toks as [_] => ML_Lex.flatten toks
| _ => error ("Single ML name expected in input: " ^ quote txt));
fun prep_ml source =
(#1 (Input.source_content source), ML_Lex.read_source source);
(*antiquotation that compiles the given ML text and emits an index entry*)
fun index_ml name kind ml = Thy_Output.antiquotation_raw name
(Scan.lift (Args.text_input -- Scan.option (Args.colon |-- Args.text_input)))
(fn ctxt => fn (source1, opt_source2) =>
let
val (txt1, toks1) = prep_ml source1;
val (txt2, toks2) =
(case opt_source2 of
SOME source => prep_ml source
| NONE => ("", []));
val txt =
if txt2 = "" then txt1
else if kind = "type" then txt1 ^ " = " ^ txt2
else if kind = "exception" then txt1 ^ " of " ^ txt2
else if Symbol_Pos.is_identifier (Long_Name.base_name (ml_name txt1))
then txt1 ^ ": " ^ txt2
else txt1 ^ " : " ^ txt2;
val txt' = if kind = "" then txt else kind ^ " " ^ txt;
val pos = Input.pos_of source1;
(*the assembled ML source must actually compile*)
val _ =
ML_Context.eval_in (SOME ctxt) ML_Compiler.flags pos (ml (toks1, toks2))
handle ERROR msg => error (msg ^ Position.here pos);
val kind' = if kind = "" then "ML" else "ML " ^ kind;
in
Latex.block
[Latex.string ("\\indexdef{}{" ^ kind' ^ "}{" ^ clean_string (ml_name txt1) ^ "}"),
Thy_Output.verbatim ctxt txt']
end);
in
val _ =
Theory.setup
(index_ml \<^binding>‹index_ML› "" ml_val #>
index_ml \<^binding>‹index_ML_op› "infix" ml_op #>
index_ml \<^binding>‹index_ML_type› "type" ml_type #>
index_ml \<^binding>‹index_ML_exception› "exception" ml_exception #>
index_ml \<^binding>‹index_ML_structure› "structure" ml_structure #>
index_ml \<^binding>‹index_ML_functor› "functor" ml_functor);
end;
(*antiquotation printing theorems together with their \rulename labels*)
val _ =
Theory.setup (Thy_Output.antiquotation_raw \<^binding>‹named_thms›
(Scan.repeat (Attrib.thm -- Scan.lift (Args.parens Args.name)))
(fn ctxt =>
map (fn (thm, name) =>
Output.output
(Document_Antiquotation.format ctxt
(Document_Antiquotation.delimit ctxt (Thy_Output.pretty_thm ctxt thm))) ^
enclose "\\rulename{" "}" (Output.output name))
#> space_implode "\\par\\smallskip%\n"
#> Latex.string #> single
#> Thy_Output.isabelle ctxt));
local
(*checks for the various entity kinds; no_check accepts anything*)
fun no_check (_: Proof.context) (name, _: Position.T) = name;
fun check_keyword ctxt (name, pos) =
if Keyword.is_keyword (Thy_Header.get_keywords' ctxt) name then name
else error ("Bad outer syntax keyword " ^ quote name ^ Position.here pos);
fun check_system_option ctxt arg =
(Completion.check_option (Options.default ()) ctxt arg; true)
handle ERROR _ => false;
val arg = enclose "{" "}" o clean_string;
(*generic document antiquotation for formal entities: produces hyperlinks
and, depending on index, definition or reference index entries*)
fun entity check markup binding index =
Thy_Output.antiquotation_raw
(binding |> Binding.map_name (fn name => name ^
(case index of NONE => "" | SOME true => "_def" | SOME false => "_ref")))
(Scan.lift (Scan.optional (Args.parens Args.name) "" -- Args.name_position))
(fn ctxt => fn (logic, (name, pos)) =>
let
val kind = translate (fn "_" => " " | c => c) (Binding.name_of binding);
val hyper_name =
"{" ^ Long_Name.append kind (Long_Name.append logic (clean_name name)) ^ "}";
val hyper =
enclose ("\\hyperlink" ^ hyper_name ^ "{") "}" #>
index = SOME true ? enclose ("\\hypertarget" ^ hyper_name ^ "{") "}";
val idx =
(case index of
NONE => ""
| SOME is_def =>
"\\index" ^ (if is_def then "def" else "ref") ^ arg logic ^ arg kind ^ arg name);
(*entity names are only checked when position reporting is active*)
val _ =
if Context_Position.is_reported ctxt pos then ignore (check ctxt (name, pos)) else ();
val latex =
idx ^
(Output.output name
|> (if markup = "" then I else enclose ("\\" ^ markup ^ "{") "}")
|> hyper o enclose "\\mbox{\\isa{" "}}");
in Latex.string latex end);
(*register the plain, _def and _ref variants of an entity antiquotation*)
fun entity_antiqs check markup kind =
entity check markup kind NONE #>
entity check markup kind (SOME true) #>
entity check markup kind (SOME false);
in
val _ =
Theory.setup
(entity_antiqs no_check "" \<^binding>‹syntax› #>
entity_antiqs Outer_Syntax.check_command "isacommand" \<^binding>‹command› #>
entity_antiqs check_keyword "isakeyword" \<^binding>‹keyword› #>
entity_antiqs check_keyword "isakeyword" \<^binding>‹element› #>
entity_antiqs Method.check_name "" \<^binding>‹method› #>
entity_antiqs Attrib.check_name "" \<^binding>‹attribute› #>
entity_antiqs no_check "" \<^binding>‹fact› #>
entity_antiqs no_check "" \<^binding>‹variable› #>
entity_antiqs no_check "" \<^binding>‹case› #>
entity_antiqs Document_Antiquotation.check "" \<^binding>‹antiquotation› #>
entity_antiqs Document_Antiquotation.check_option "" \<^binding>‹antiquotation_option› #>
entity_antiqs Document_Marker.check "" \<^binding>‹document_marker› #>
entity_antiqs no_check "isasystem" \<^binding>‹setting› #>
entity_antiqs check_system_option "isasystem" \<^binding>‹system_option› #>
entity_antiqs no_check "" \<^binding>‹inference› #>
entity_antiqs no_check "isasystem" \<^binding>‹executable› #>
entity_antiqs Isabelle_Tool.check "isatool" \<^binding>‹tool› #>
entity_antiqs ML_Context.check_antiquotation "" \<^binding>‹ML_antiquotation› #>
entity_antiqs (K JEdit.check_action) "isasystem" \<^binding>‹action›);
end;
end;
Theory ETTS_Tests
section‹A test suite for ETTS›
theory ETTS_Tests
imports
"../ETTS_Auxiliary"
Conditional_Transfer_Rule.IML_UT
begin
subsection‹‹amend_ctxt_data››
ML_file‹ETTS_TEST_AMEND_CTXT_DATA.ML›
(*sandbox locale with three universes and a suitably closed operation for
testing the amendment of the tts context data*)
locale test_amend_ctxt_data =
fixes UA :: "'ao set" and UB :: "'bo set" and UC :: "'co set"
and le :: "['ao, 'ao] ⇒ bool" (infix ‹≤⇩o⇩w› 50)
and ls :: "['bo, 'bo] ⇒ bool" (infix ‹<⇩o⇩w› 50)
and f :: "['ao, 'bo] ⇒ 'co"
assumes closed_f: "a ∈ UA ⟹ b ∈ UB ⟹ f a b ∈ UC"
begin
notation le (‹'(≤⇩o⇩w')›)
and le (infix ‹≤⇩o⇩w› 50)
and ls (‹'(<⇩o⇩w')›)
and ls (infix ‹<⇩o⇩w› 50)
(*register the sbterms together with the universes they act on*)
tts_register_sbts ‹(≤⇩o⇩w)› | UA
proof-
assume "Domainp AOA = (λx. x ∈ UA)" "bi_unique AOA" "right_total AOA"
from tts_AA_eq_transfer[OF this] show ?thesis by auto
qed
tts_register_sbts ‹(<⇩o⇩w)› | UB
proof-
assume "Domainp BOA = (λx. x ∈ UB)" "bi_unique BOA" "right_total BOA"
from tts_AA_eq_transfer[OF this] show ?thesis by auto
qed
tts_register_sbts f | UA and UB and UC
proof-
assume
"Domainp AOC = (λx. x ∈ UA)" "bi_unique AOC" "right_total AOC"
"Domainp BOB = (λx. x ∈ UB)" "bi_unique BOB" "right_total BOB"
"Domainp COA = (λx. x ∈ UC)" "bi_unique COA" "right_total COA"
from tts_AB_C_transfer[OF closed_f this] show ?thesis by auto
qed
end
(*execute and report the test suite in the context of the locale*)
context test_amend_ctxt_data
begin
ML‹
val tts_test_amend_ctxt_data_test_results =
etts_test_amend_ctxt_data.execute_test_suite_amend_context_data @{context}
›
ML‹
val _ = tts_test_amend_ctxt_data_test_results
|> UT_Test_Suite.output_test_results true
›
end
subsection‹‹tts_algorithm››
text‹
Some of the elements of the content of this section are based on the
elements of the content of \cite{cain_nine_2019}.
›
lemma exI': "P x ⟹ ∃x. P x" by auto
(*a minimal type-class hierarchy serving as the relativization source*)
class tta_mult =
fixes tta_mult :: "'a ⇒ 'a ⇒ 'a" (infixl "*⇩t⇩t⇩a" 65)
class tta_semigroup = tta_mult +
assumes tta_assoc[simp]: "(a *⇩t⇩t⇩a b) *⇩t⇩t⇩a c = a *⇩t⇩t⇩a (b *⇩t⇩t⇩a c)"
(*elementwise multiplication of sets and left ideals*)
definition set_mult :: "'a::tta_mult set ⇒ 'a set ⇒ 'a set" (infixl "❙*⇩t⇩t⇩a" 65)
where "set_mult S T = {s *⇩t⇩t⇩a t | s t. s ∈ S ∧ t ∈ T}"
definition left_ideal :: "'a::tta_mult set ⇒ bool"
where "left_ideal T ⟷ (∀s. ∀t∈T. s *⇩t⇩t⇩a t ∈ T)"
lemma left_idealI[intro]:
assumes "⋀s t. t ∈ T ⟹ s *⇩t⇩t⇩a t ∈ T"
shows "left_ideal T"
using assms unfolding left_ideal_def by simp
lemma left_idealE[elim]:
assumes "left_ideal T"
obtains "⋀s t. t ∈ T ⟹ s *⇩t⇩t⇩a t ∈ T"
using assms unfolding left_ideal_def by simp
lemma left_ideal_set_mult_iff: "left_ideal T ⟷ UNIV ❙*⇩t⇩t⇩a T ⊆ T"
unfolding left_ideal_def set_mult_def by auto
(*unoverloaded counterparts and relativized (set-based) constants*)
ud ‹set_mult›
ud ‹left_ideal›
ctr relativization
synthesis ctr_simps
assumes [transfer_domain_rule]: "Domainp A = (λx. x ∈ U)"
and [transfer_rule]: "bi_unique A" "right_total A"
trp (?'a A)
in set_mult_ow: set_mult.with_def
and left_ideal_ow: left_ideal.with_def
locale tta_semigroup_hom =
fixes f :: "'a::tta_semigroup ⇒ 'b::tta_semigroup"
assumes hom: "f (a *⇩t⇩t⇩a b) = f a *⇩t⇩t⇩a f b"
(*surjective homomorphisms preserve left ideals (type-based statement)*)
context tta_semigroup_hom
begin
lemma tta_left_ideal_closed:
assumes "left_ideal T" and "surj f"
shows "left_ideal (f ` T)"
unfolding left_ideal_def
proof(intro allI ballI)
fix fs ft assume prems: "ft ∈ f ` T"
then obtain t where t: "t ∈ T" and ft_def: "ft = f t" by clarsimp
from assms(2) obtain s where fs_def: "fs = f s" by auto
from assms have "t ∈ T ⟹ s *⇩t⇩t⇩a t ∈ T" for s t by auto
with t show "fs *⇩t⇩t⇩a ft ∈ f ` T"
unfolding ft_def fs_def hom[symmetric] by simp
qed
end
(*homomorphism locale parameterized by the two multiplications*)
locale semigroup_mult_hom_with =
dom: tta_semigroup times_a + cod: tta_semigroup times_b
for times_a (infixl ‹*⇩o⇩w⇩.⇩a› 70) and times_b (infixl ‹*⇩o⇩w⇩.⇩b› 70) +
fixes f :: "'a ⇒ 'b"
assumes f_hom: "f (a *⇩o⇩w⇩.⇩a b) = f a *⇩o⇩w⇩.⇩b f b"
lemma semigroup_mult_hom_with[ud_with]:
"tta_semigroup_hom = semigroup_mult_hom_with (*⇩t⇩t⇩a) (*⇩t⇩t⇩a)"
unfolding
semigroup_mult_hom_with_def tta_semigroup_hom_def
class.tta_semigroup_def semigroup_mult_hom_with_axioms_def
by auto
(*set-based (relativized) counterparts of the semigroup structures*)
locale semigroup_ow =
fixes U :: "'ag set" and f :: "['ag, 'ag] ⇒ 'ag" (infixl ‹❙*⇩o⇩w› 70)
assumes f_closed: "⟦ a ∈ U; b ∈ U ⟧ ⟹ a ❙*⇩o⇩w b ∈ U"
and assoc: "⟦ a ∈ U; b ∈ U; c ∈ U ⟧ ⟹ a ❙*⇩o⇩w b ❙*⇩o⇩w c = a ❙*⇩o⇩w (b ❙*⇩o⇩w c)"
begin
notation f (infixl ‹❙*⇩o⇩w› 70)
lemma f_closed'[simp]: "∀x∈U. ∀y∈U. x ❙*⇩o⇩w y ∈ U" by (simp add: f_closed)
tts_register_sbts ‹(❙*⇩o⇩w)› | U by (rule tts_AA_A_transfer[OF f_closed])
end
locale times_ow =
fixes U :: "'ag set" and times :: "['ag, 'ag] ⇒ 'ag" (infixl ‹*⇩o⇩w› 70)
assumes times_closed[simp, intro]: "⟦ a ∈ U; b ∈ U ⟧ ⟹ a *⇩o⇩w b ∈ U"
begin
notation times (infixl ‹*⇩o⇩w› 70)
lemma times_closed'[simp]: "∀x∈U. ∀y∈U. x *⇩o⇩w y ∈ U" by simp
tts_register_sbts ‹(*⇩o⇩w)› | U by (rule tts_AA_A_transfer[OF times_closed])
end
locale semigroup_mult_ow = times_ow U times
for U :: "'ag set" and times +
assumes mult_assoc:
"⟦ a ∈ U; b ∈ U; c ∈ U ⟧ ⟹ a *⇩o⇩w b *⇩o⇩w c = a *⇩o⇩w (b *⇩o⇩w c)"
begin
sublocale mult: semigroup_ow U ‹(*⇩o⇩w)›
by unfold_locales (auto simp: times_closed' mult_assoc)
end
(*homomorphisms of set-based semigroups*)
locale semigroup_mult_hom_ow =
dom: semigroup_mult_ow UA times_a +
cod: semigroup_mult_ow UB times_b
for UA :: "'a set"
and UB :: "'b set"
and times_a (infixl ‹*⇩o⇩w⇩.⇩a› 70)
and times_b (infixl ‹*⇩o⇩w⇩.⇩b› 70) +
fixes f :: "'a ⇒ 'b"
assumes f_closed[simp]: "a ∈ UA ⟹ f a ∈ UB"
and f_hom: "⟦ a ∈ UA; b ∈ UA ⟧ ⟹ f (a *⇩o⇩w⇩.⇩a b) = f a *⇩o⇩w⇩.⇩b f b"
begin
lemma f_closed'[simp]: "f ` UA ⊆ UB" by auto
tts_register_sbts f | UA and UB by (rule tts_AB_transfer[OF f_closed'])
end
(*surjective homomorphisms preserve left ideals (set-based statement)*)
context semigroup_mult_hom_ow
begin
lemma tta_left_ideal_ow_closed:
assumes "T ⊆ UA"
and "left_ideal_ow UA (*⇩o⇩w⇩.⇩a) T"
and "f ` UA = UB"
shows "left_ideal_ow UB (*⇩o⇩w⇩.⇩b) (f ` T)"
unfolding left_ideal_ow_def
proof(intro ballI)
fix fs ft assume ft: "ft ∈ f ` T" and fs: "fs ∈ UB"
then obtain t where t: "t ∈ T" and ft_def: "ft = f t" by auto
from assms(3) fs obtain s where fs_def: "fs = f s" and s: "s ∈ UA" by auto
from assms have "⟦ s ∈ UA; t ∈ T ⟧ ⟹ s *⇩o⇩w⇩.⇩a t ∈ T" for s t
unfolding left_ideal_ow_def by simp
with assms(1) s t fs show "fs *⇩o⇩w⇩.⇩b ft ∈ f ` T"
using f_hom[symmetric, OF s] unfolding ft_def fs_def by auto
qed
end
(*on the full universe the set-based locales collapse to the classes*)
lemma semigroup_mult_ow: "class.tta_semigroup = semigroup_mult_ow UNIV"
unfolding
class.tta_semigroup_def semigroup_mult_ow_def
semigroup_mult_ow_axioms_def times_ow_def
by simp
lemma semigroup_mult_hom_ow:
"tta_semigroup_hom = semigroup_mult_hom_ow UNIV UNIV (*⇩t⇩t⇩a) (*⇩t⇩t⇩a)"
unfolding
tta_semigroup_hom_def semigroup_mult_hom_ow_axioms_def
semigroup_mult_hom_ow_def semigroup_mult_ow_def
semigroup_mult_ow_axioms_def times_ow_def
by simp
(*transfer rules relating the set-based locale predicates to their
type-based counterparts*)
context
includes lifting_syntax
begin
lemma semigroup_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "right_total A"
shows
"((A ===> A ===> A) ===> (=))
(semigroup_ow (Collect (Domainp A))) semigroup"
proof-
let ?P = "((A ===> A ===> A) ===> (=))"
let ?semigroup_ow = "semigroup_ow (Collect (Domainp A))"
(*the closedness assumption is trivially true on the full universe*)
let ?rf_UNIV =
"(λf::['b, 'b] ⇒ 'b. (∀x y. x ∈ UNIV ⟶ y ∈ UNIV ⟶ f x y ∈ UNIV))"
have "?P ?semigroup_ow (λf. ?rf_UNIV f ∧ semigroup f)"
unfolding semigroup_ow_def semigroup_def
apply transfer_prover_start
apply transfer_step+
by simp
thus ?thesis by simp
qed
lemma semigroup_mult_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "right_total A"
shows
"((A ===> A ===> A) ===> (=))
(semigroup_mult_ow (Collect (Domainp A)))
class.tta_semigroup"
proof -
let ?P = ‹((A ===> A ===> A) ===> (=))›
and ?semigroup_mult_ow =
‹(λf. semigroup_mult_ow (Collect (Domainp A)) f)›
and ?rf_UNIV =
‹(λf::['b, 'b] ⇒ 'b. (∀x y. x ∈ UNIV ⟶ y ∈ UNIV ⟶ f x y ∈ UNIV))›
have "?P ?semigroup_mult_ow (λf. ?rf_UNIV f ∧ class.tta_semigroup f)"
unfolding
semigroup_mult_ow_def class.tta_semigroup_def
semigroup_mult_ow_axioms_def times_ow_def
apply transfer_prover_start
apply transfer_step+
by simp
thus ?thesis by simp
qed
lemma semigroup_mult_hom_transfer[transfer_rule]:
assumes [transfer_rule]:
"bi_unique A" "right_total A" "bi_unique B" "right_total B"
shows
"((A ===> A ===> A) ===> (B ===> B ===> B) ===> (A ===> B) ===> (=))
(semigroup_mult_hom_ow (Collect (Domainp A)) (Collect (Domainp B)))
semigroup_mult_hom_with"
proof-
let ?PA = "A ===> A ===> A"
and ?PB = "B ===> B ===> B"
and ?semigroup_mult_hom_ow =
‹
λtimes_a times_b f. semigroup_mult_hom_ow
(Collect (Domainp A)) (Collect (Domainp B)) times_a times_b f
›
(*closedness of f is trivially true on the full universe*)
let ?closed = ‹λf::'b⇒'d . ∀a. a ∈ UNIV ⟶ f a ∈ UNIV›
have
"(?PA ===> ?PB ===> (A ===> B) ===> (=))
?semigroup_mult_hom_ow
(
λtimes_a times_b f.
?closed f ∧ semigroup_mult_hom_with times_a times_b f
)"
unfolding
times_ow_def
semigroup_mult_hom_ow_def
semigroup_mult_hom_ow_axioms_def
semigroup_mult_hom_with_def
semigroup_mult_hom_with_axioms_def
apply transfer_prover_start
apply transfer_step+
by blast
thus ?thesis by simp
qed
end
(*Test harness for the extended relativization algorithm: the ML test
suite is loaded and executed inside an appropriately parameterized
tts context based on the locale semigroup_mult_hom_ow.*)
context semigroup_mult_hom_ow
begin
ML_file‹ETTS_TEST_TTS_ALGORITHM.ML›
named_theorems semigroup_mult_hom_ow_test_simps
lemmas [semigroup_mult_hom_ow_test_simps] =
ctr_simps_Collect_mem_eq
ctr_simps_in_iff
(*A representative tts context: two type variables are relativized to
the carrier sets UA and UB, and the overloaded multiplications and the
homomorphism are substituted by their set-based counterparts.*)
tts_context
tts: (?'a to UA) and (?'b to UB)
sbterms: (‹(*⇩t⇩t⇩a)::[?'a::tta_semigroup, ?'a] ⇒ ?'a› to ‹(*⇩o⇩w⇩.⇩a)›)
and (‹(*⇩t⇩t⇩a)::[?'b::tta_semigroup, ?'b] ⇒ ?'b› to ‹(*⇩o⇩w⇩.⇩b)›)
and (‹?f::?'a::tta_semigroup ⇒ ?'b::tta_semigroup› to f)
rewriting semigroup_mult_hom_ow_test_simps
substituting semigroup_mult_hom_ow_axioms
and dom.semigroup_mult_ow_axioms
and cod.semigroup_mult_ow_axioms
eliminating ‹UA ≠ {}› and ‹UB ≠ {}›
through (auto simp only: left_ideal_ow_def)
begin
ML‹
val tts_test_amend_ctxt_data_test_results =
etts_test_tts_algorithm.execute_test_suite_tts_algorithm @{context}
›
ML‹
val _ = tts_test_amend_ctxt_data_test_results
|> UT_Test_Suite.output_test_results true
›
end
end
subsection‹‹tts_register_sbts››
(*Test harness for the command tts_register_sbts: the context fixes a
term f and a carrier set UA, as required by the registration tests, and
then runs the ML test suite and prints its results.*)
context
fixes f :: "'a ⇒ 'b ⇒ 'c"
and UA :: "'a set"
begin
ML_file‹ETTS_TEST_TTS_REGISTER_SBTS.ML›
ML‹
val tts_test_tts_register_sbts_test_results =
etts_test_tts_register_sbts.execute_test_suite_tts_register_sbts @{context}
›
ML‹
val _ = tts_test_tts_register_sbts_test_results
|> UT_Test_Suite.output_test_results true
›
end
endFile ‹ETTS_TEST_AMEND_CTXT_DATA.ML›
(*Interface of the test suite for the command tts_context: executing the
suite amends the context data in various (correct and deliberately
incorrect) ways and records the outcomes.*)
signature ETTS_TEST_AMEND_CTXT_DATA =
sig
val execute_test_suite_amend_context_data :
Proof.context ->
(
ETTS_Context.amend_ctxt_data_input_type * Proof.context,
ETTS_Context.ctxt_def_type * Proof.context
) UT_Test_Suite.test_results_suite
end;
structure etts_test_amend_ctxt_data : ETTS_TEST_AMEND_CTXT_DATA =
struct
(*Prefix an error message with the name of the command tts_context.*)
fun mk_msg_tts_ctxt_error msg = String.concat ["tts_context: ", msg]
(*Equality of token sources, tokens and token values "up to position"
(eps): positions are ignored except for the input ranges recorded in the
tokens themselves. Values of the kinds Name, Attribute, Declaration and
Files cannot be compared meaningfully and cause an error. Fix: the
original eq_eps_value was a non-exhaustive match — comparing two values
built from *different* constructors raised the exception Match instead
of returning false; a terminal catch-all clause returning false is
added (genuinely different values are simply unequal).*)
local
val eq_eps_src_msg = "eq_eps_src: comparison is not possible"
in
(*Two sources are eps-equal if their names agree and their argument
tokens are pairwise eps-equal.*)
fun eq_eps_src (src, src') =
let
val eq_name = Token.name_of_src src = Token.name_of_src src'
val eq_args = Token.args_of_src src ~~ Token.args_of_src src'
|> map eq_eps_token
|> List.all I
in eq_name andalso eq_args end
(*Two tokens are eps-equal if kind, content, source, input range and
the (optional) assigned values all agree.*)
and eq_eps_token (token : Token.T, token' : Token.T) =
let
val eq_kind = Token.kind_of token = Token.kind_of token'
val eq_content = Token.content_of token = Token.content_of token'
val eq_source = Token.source_of token = Token.source_of token'
val eq_range =
Input.range_of (Token.input_of token) =
Input.range_of (Token.input_of token')
val eq_slot = (Token.get_value token, Token.get_value token')
|> eq_option eq_eps_value
in
eq_kind
andalso eq_content
andalso eq_source
andalso eq_range
andalso eq_slot
end
(*Structural comparison of token values; theorems are compared via
Thm.eq_thm, incomparable kinds raise an error.*)
and eq_eps_value (Token.Source src, Token.Source src') = eq_eps_src (src, src')
| eq_eps_value (Token.Literal ltrl, Token.Literal ltrl') = ltrl = ltrl'
| eq_eps_value (Token.Name _, Token.Name _) = error eq_eps_src_msg
| eq_eps_value (Token.Typ T, Token.Typ T') = T = T'
| eq_eps_value (Token.Term t, Token.Term t') = t = t'
| eq_eps_value (Token.Fact (c_opt, thms), Token.Fact (c_opt', thms')) =
let
val eq_c = c_opt = c_opt'
val eq_thms = eq_list Thm.eq_thm (thms, thms')
in eq_c andalso eq_thms end
| eq_eps_value (Token.Attribute _, Token.Attribute _) =
error eq_eps_src_msg
| eq_eps_value (Token.Declaration _, Token.Declaration _) =
error eq_eps_src_msg
| eq_eps_value (Token.Files _, Token.Files _) =
error eq_eps_src_msg
(*values built from different constructors are simply not equal*)
| eq_eps_value _ = false;
end;
(*Equality of two tts context data records, field by field. The field
mpespc_opt contains a tactic (a function) and cannot be compared: both
records must have it set to NONE, otherwise an error is raised.*)
fun eq_tts_ctxt_data
(
ctxt_data : ETTS_Context.ctxt_def_type,
ctxt_data' : ETTS_Context.ctxt_def_type
) =
let
(*substitution references: equal facts and eps-equal attribute sources*)
fun eq_subst_thm (rsrc, rsrc') = fst rsrc = fst rsrc'
andalso eq_list eq_eps_src (snd rsrc, snd rsrc')
(*guard: comparison is only defined when both mpespc_opt are NONE*)
val _ = (#mpespc_opt ctxt_data, #mpespc_opt ctxt_data')
|> apply2 is_none
|> (fn (x, y) => x = true andalso y = true)
orelse error "eq_tts_ctxt_data: comparison is not possible"
val eq_rispec = #rispec ctxt_data = #rispec ctxt_data'
val eq_sbtspec = #sbtspec ctxt_data = #sbtspec ctxt_data'
val eq_subst_thms =
eq_list eq_subst_thm (#subst_thms ctxt_data, #subst_thms ctxt_data')
val eq_sbrr_opt = (#sbrr_opt ctxt_data, #sbrr_opt ctxt_data')
|> eq_option eq_subst_thm
val eq_attrbs = eq_list eq_eps_src (#attrbs ctxt_data, #attrbs ctxt_data')
in
eq_rispec
andalso eq_sbtspec
andalso eq_subst_thms
andalso eq_sbrr_opt
andalso eq_attrbs
end;
(*Positive test: amending the context data with a well-formed input
(string form) must produce exactly the expected context data record
(term form), compared with eq_tts_ctxt_data.*)
fun test_eq_tts_context (ctxt : Proof.context) =
let
(*input specification, as the user would type it*)
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(≤)::?'a::ord⇒?'a::ord⇒bool"
val sbt_1 = "(≤⇩o⇩w)"
val tbt_2 = "(<)::?'b::ord⇒?'b::ord⇒bool"
val sbt_2 = "(<⇩o⇩w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args : ETTS_Context.amend_ctxt_data_input_type =
(((((rispec, sbtspec), NONE), []), NONE), [])
(*expected output, constructed directly as terms/types*)
val s_a_ix : indexname = ("'a", 0)
val s_a_T = TVar (s_a_ix, \<^sort>‹ord›)
val aT = TFree ("'ao", \<^sort>‹type›)
val U1 = Free ("UA", HOLogic.mk_setT aT)
val s_b_ix : indexname = ("'b", 0)
val s_b_T = TVar (s_b_ix, \<^sort>‹ord›)
val bT = TFree ("'bo", \<^sort>‹type›)
val U2 = Free ("UB", HOLogic.mk_setT bT)
val less_eqt =
Const (\<^const_name>‹less_eq›, s_a_T --> s_a_T --> HOLogic.boolT)
val lesst =
Const (\<^const_name>‹less›, s_b_T --> s_b_T --> HOLogic.boolT)
val leqt = Free ("le", aT --> aT --> HOLogic.boolT)
val lst = Free ("ls", bT --> bT --> HOLogic.boolT)
val rispec = [(s_a_ix, U1), (s_b_ix, U2)]
val sbtspec = [(less_eqt, leqt), (lesst, lst)]
val subst_thms = []
val sbrr_opt = NONE
val mpespc_opt = NONE
val attrbs = []
val tts_ctxt_data_out : ETTS_Context.ctxt_def_type =
{
rispec = rispec,
sbtspec = sbtspec,
subst_thms = subst_thms,
sbrr_opt = sbrr_opt,
mpespc_opt = mpespc_opt,
attrbs = attrbs
}
in
(*only the context data is compared; the output context is ignored*)
UT_Test_Suite.assert_brel
"equality"
(eq_fst eq_tts_ctxt_data)
(tts_ctxt_data_out, ctxt)
(args, ctxt)
end;
(*Negative test: opening a tts context inside an already amended tts
context must fail with a "nested tts contexts" error.*)
fun test_exc_tts_context_tts_context thy =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::'a set"
val rispec1 = [(risstv1_c, U1_c)]
val args1 : ETTS_Context.amend_ctxt_data_input_type =
(((((rispec1, []), NONE), []), NONE), [])
(*first amendment succeeds; the second must raise*)
val ctxt' = ETTS_Context.amend_context_data args1 ctxt |> snd
val risstv2_c = "?'b"
val U2_c = "U2::'b set"
val rispec2 = [(risstv2_c, U2_c)]
val args2 : ETTS_Context.amend_ctxt_data_input_type =
(((((rispec2, []), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error "nested tts contexts"
in
UT_Test_Suite.assert_exception
"nested tts contexts" (args2, ctxt') (ERROR err_msg)
end;
(*Negative test: the relativization specification must not be empty.*)
fun test_exc_rispec_empty thy =
let
val ctxt = Proof_Context.init_global thy;
val args = ((((([], []), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error "rispec must not be empty"
in
UT_Test_Suite.assert_exception "rispec empty" (args, ctxt) (ERROR err_msg)
end;
(*Negative test: every risset must be a term of a set type ('a set or
?'a set); "('b list) set" elements etc. are rejected.*)
fun test_exc_rispec_not_set thy =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::('b list) set"
val risstv2_c = "?'b"
val U2_c = "U2::'a set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val args = (((((rispec, []), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"risset must be terms of the type of the form ?'a set or 'a set"
in
UT_Test_Suite.assert_exception
"risset are not all sets" (args, ctxt) (ERROR err_msg)
end;
(*Negative test: the schematic type variables being relativized must be
distinct (?'a occurs twice below).*)
fun test_exc_rispec_duplicate_risstvs thy =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::'a set"
val risstv2_c = "?'b"
val U2_c = "U2::'b set"
val risstv3_c = "?'a"
val U3_c = "U3::'c set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c), (risstv3_c, U3_c)]
val args = (((((rispec, []), NONE), []), NONE), [])
in
UT_Test_Suite.assert_exception
"duplicate risstvs"
(args, ctxt)
(ERROR "tts_context: risstvs must be distinct")
end;
(*Negative test: the same fixed type variable ('b below) must not occur
with two distinct sorts among the risset.*)
fun test_exc_rispec_not_ds_dtv thy =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::'a set"
val risstv2_c = "?'b"
val U2_c = "U2::'b::{group_add, finite} set"
val risstv3_c = "?'c"
val U3_c = "U3::'c::{group_add, order} set"
val risstv4_c = "?'d"
val U4_c = "U4::'b::{group_add, order} set"
val rispec =
[(risstv1_c, U1_c), (risstv2_c, U2_c), (risstv3_c, U3_c), (risstv4_c, U4_c)]
val args = (((((rispec, []), NONE), []), NONE), [])
val err_msg =
"tts_context: risset: type variables with distinct sorts must be distinct"
in
UT_Test_Suite.assert_exception
"not distinct sorts ⟶ distinct types variables"
(args, ctxt)
(ERROR err_msg)
end;
(*Negative test: the same variable name (U2 below) must not occur with
two distinct types among the risset.*)
fun test_exc_rispec_not_dt_dv thy =
let
val ctxt = Proof_Context.init_global thy;
val risstv1_c = "?'a"
val U1_c = "U1::'a set"
val risstv2_c = "?'b"
val U2_c = "U2::'b::{group_add, finite} set"
val risstv3_c = "?'c"
val U3_c = "U3::'c::{group_add, order} set"
val risstv4_c = "?'d"
val U4_c = "U2::'c::{group_add, order} set"
val rispec =
[
(risstv1_c, U1_c),
(risstv2_c, U2_c),
(risstv3_c, U3_c),
(risstv4_c, U4_c)
]
val args = (((((rispec, []), NONE), []), NONE), [])
val err_msg =
"tts_context: risset: variables with distinct types must be distinct"
in
UT_Test_Suite.assert_exception
"not distinct types ⟶ distinct variables" (args, ctxt) (ERROR err_msg)
end;
(*Negative test: a single schematic type variable must not occur among
the tbts with two distinct sorts (?'a::ord vs ?'a::order below).*)
fun test_exc_distinct_sorts ctxt =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(<)::?'a::ord⇒?'a::ord⇒bool"
val sbt_1 = "(<⇩o⇩w)"
val tbt_2 = "?a::?'a::order⇒?'a::order⇒bool"
val sbt_2 = "f"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"tbts: a single stv should not have two distinct sorts associated with it"
in
UT_Test_Suite.assert_exception
"tbts: an stv with distinct sorts" (args, ctxt) (ERROR err_msg)
end;
(*Negative test: the sbts must be type instances of the tbts up to
renaming of type variables (here both sbts live on the same type, so no
consistent renaming exists).*)
fun test_exc_sbts_no_tis ctxt =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(<)::?'a::ord⇒?'a::ord⇒bool"
val sbt_1 = "(<⇩o⇩w)"
val tbt_2 = "(≤)::?'a::ord⇒?'a::ord⇒bool"
val sbt_2 = "(≤⇩o⇩w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"\n\t-the types of the sbts must be equivalent " ^
"to the types of the tbts up to renaming of the type variables\n" ^
"\t-to each type variable that occurs among the tbts must correspond " ^
"exactly one type variable among all type " ^
"variables that occur among all of the sbts"
in
UT_Test_Suite.assert_exception
"sbts are not type instances of tbts" (args, ctxt) (ERROR err_msg)
end;
(*Negative test: tbts must be constants or schematic variables; the
fixed variable g below is rejected.*)
fun test_exc_tbt_fixed ctxt =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(<)::?'a::ord⇒?'a::ord⇒bool"
val sbt_1 = "(<⇩o⇩w)"
val tbt_2 = "g::?'a::ord⇒?'a::ord⇒bool"
val sbt_2 = "(<⇩o⇩w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"tbts must consist of constants and schematic variables"
in
UT_Test_Suite.assert_exception
"tbts are not constants and schematic variables"
(args, ctxt)
(ERROR err_msg)
end;
(*Negative test: every sbt must have been registered previously via the
command tts_register_sbts (g below is unregistered).*)
fun test_exc_sbts_not_registered ctxt =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(<)::?'a::ord⇒?'a::ord⇒bool"
val sbt_1 = "(<⇩o⇩w)"
val tbt_2 = "(≤)::?'a::ord⇒?'a::ord⇒bool"
val sbt_2 = "g::'bo::type⇒'bo::type⇒bool"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"sbts must be registered using the command tts_register_sbts"
in
UT_Test_Suite.assert_exception
"sbts must be registered" (args, ctxt) (ERROR err_msg)
end;
(*Negative test: the tbts must be distinct from one another.*)
fun test_exc_tbts_not_distinct ctxt =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val risstv2_c = "?'b"
val U2_c = "UB::'bo set"
val rispec = [(risstv1_c, U1_c), (risstv2_c, U2_c)]
val tbt_1 = "(≤)::?'a::ord⇒?'a::ord⇒bool"
val sbt_1 = "(<⇩o⇩w)"
val tbt_2 = "(≤)::?'a::ord⇒?'a::ord⇒bool"
val sbt_2 = "(<⇩o⇩w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args = (((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error "tbts must be distinct"
in
UT_Test_Suite.assert_exception
"tbts must be distinct"
(args, ctxt)
(ERROR err_msg)
end;
(*Negative test: the (stv, ftv) pairs of the sbterms must form a subset
of those of the RI specification (?'b below is not relativized).*)
fun test_exc_sbterms_subset_rispec (ctxt : Proof.context) =
let
val risstv1_c = "?'a"
val U1_c = "UA::'ao set"
val rispec = [(risstv1_c, U1_c)]
val tbt_1 = "(≤)::?'a::ord⇒?'a::ord⇒bool"
val sbt_1 = "(≤⇩o⇩w)"
val tbt_2 = "(<)::?'b::ord⇒?'b::ord⇒bool"
val sbt_2 = "(<⇩o⇩w)"
val sbtspec = [(tbt_1, sbt_1), (tbt_2, sbt_2)]
val args : ETTS_Context.amend_ctxt_data_input_type =
(((((rispec, sbtspec), NONE), []), NONE), [])
val err_msg = mk_msg_tts_ctxt_error
"the collection of the (stv, ftv) pairs associated with the sbterms " ^
"must form a subset of the collection of the (stv, ftv) pairs " ^
"associated with the RI specification, provided that only the pairs " ^
"(stv, ftv) associated with the sbterms such that ftv occurs in a " ^
"premise of a theorem associated with an sbterm are taken into account"
in
UT_Test_Suite.assert_exception
"tbts "
(args, ctxt)
(ERROR err_msg)
end;
(*Assembly and execution of the test suite: the function under test is
ETTS_Context.amend_context_data; every test above is attached to the
suite, then the suite is executed as a whole.*)
local
val test_amend_context_data = uncurry ETTS_Context.amend_context_data;
(*pretty-printer for the test inputs (the context itself is opaque)*)
fun test_amend_context_string_of_input (args, ctxt) =
let
val c =
"ctxt : unknown context\n" ^
ETTS_Context.string_of_amend_context_data_args ctxt args
in c end;
in
fun mk_test_suite_amend_context_data ctxt =
let
(*pretty-printer for the test outputs*)
fun string_of_tts_ctxt_data (ctxt_data, ctxt) =
ETTS_Context.string_of_tts_ctxt_data ctxt ctxt_data
val test_suite_amend_context_data = UT_Test_Suite.init
"amend_context_data"
test_amend_context_data
test_amend_context_string_of_input
string_of_tts_ctxt_data
val thy = Proof_Context.theory_of ctxt
in
test_suite_amend_context_data
|> test_eq_tts_context ctxt
|> test_exc_tts_context_tts_context thy
|> test_exc_rispec_empty thy
|> test_exc_rispec_not_set thy
|> test_exc_rispec_duplicate_risstvs thy
|> test_exc_rispec_not_ds_dtv thy
|> test_exc_rispec_not_dt_dv thy
|> test_exc_distinct_sorts ctxt
|> test_exc_sbts_no_tis ctxt
|> test_exc_tbt_fixed ctxt
|> test_exc_sbts_not_registered ctxt
|> test_exc_tbts_not_distinct ctxt
|> test_exc_sbterms_subset_rispec ctxt
end;
fun execute_test_suite_amend_context_data ctxt =
UT_Test_Suite.execute (mk_test_suite_amend_context_data ctxt)
end;
end;File ‹ETTS_TEST_TTS_ALGORITHM.ML›
(*Interface of the test suite for the extended relativization algorithm
(ERA): the suite applies the algorithm to concrete theorems and records
the resulting theorem together with the writer log.*)
signature ETTS_TEST_TTS_ALGORITHM =
sig
type tts_algorithm_in_type
val execute_test_suite_tts_algorithm :
Proof.context ->
(tts_algorithm_in_type,
(thm * int list) * Proof.context)
UT_Test_Suite.test_results_suite
end;
structure etts_test_tts_algorithm : ETTS_TEST_TTS_ALGORITHM =
struct
(*Prefix an error message with the name of the tts algorithm.*)
fun mk_msg_tts_algorithm_error msg = String.concat ["tts_algorithm: ", msg]
(*Input of the algorithm under test: the context, the output mode, the
writer log, the RI specification, the sbterm specification, the optional
rewrite rules, the substitution theorems, the optional premise
elimination specification, the attributes and the input theorem.*)
type tts_algorithm_in_type =
Proof.context *
ETTS_Algorithm.etts_output_type *
int list *
(indexname * term) list *
(term * term) list *
(Facts.ref * Token.src list) option *
(Facts.ref * Token.src list) list *
(term list * (Proof.context -> tactic)) option *
Token.src list *
thm;
(*Render the writer log (a list of integers) as a labelled string.*)
fun string_of_writer writer =
  "writer: " ^ ML_Syntax.print_list Int.toString writer
(*Comparison of two algorithm outputs: the full propositions of the
output theorems must be alpha-convertible and the writer logs must be
identical; the output contexts are ignored.*)
fun rel_tts_algorithm_out
(
act_out : (thm * int list) * Proof.context,
exp_out : (thm * int list) * Proof.context
) =
let
val ((thm_act_out, writer_act_out), _) = act_out
val ((thm_exp_out, writer_exp_out), _) = exp_out
val t_act_out = Thm.full_prop_of thm_act_out
val t_exp_out = Thm.full_prop_of thm_exp_out
in
t_act_out aconv t_exp_out
andalso writer_act_out = writer_exp_out
end;
(*Positive test: running the ERA on tta_left_ideal_closed inside the
ambient tts context must produce the set-based theorem
tta_left_ideal_ow_closed with the expected writer log.*)
fun test_eq_tts_algorithm (ctxt : Proof.context) =
let
(*the algorithm parameters are taken from the ambient tts context*)
val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
val rispec = #rispec tts_ctxt_data
val sbtspec = #sbtspec tts_ctxt_data
val sbrr_opt = #sbrr_opt tts_ctxt_data
val subst_thms = #subst_thms tts_ctxt_data
val mpespc_opt = #mpespc_opt tts_ctxt_data
val attrbs = #attrbs tts_ctxt_data
val tts_output_type = ETTS_Algorithm.default
val writer_in = [1, 1, 1, 1]
val in_thm = @{thm tta_semigroup_hom.tta_left_ideal_closed}
val tts_algorithm_in : tts_algorithm_in_type =
(
ctxt,
tts_output_type,
writer_in,
rispec,
sbtspec,
sbrr_opt,
subst_thms,
mpespc_opt,
attrbs,
in_thm
)
val writer_out = [1, 3, 1, 1]
val exp_tts_algorithm_out =
((@{thm tta_left_ideal_ow_closed}, writer_out), ctxt)
in
UT_Test_Suite.assert_brel
"equality"
rel_tts_algorithm_out
exp_tts_algorithm_out
tts_algorithm_in
end;
(*Negative test: the input theorem must contain no fixed type variables
(exI' is instantiated with the fixed 'a below to violate this).*)
fun test_exc_ftvs ctxt =
let
val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
val rispec = #rispec tts_ctxt_data
val sbtspec = #sbtspec tts_ctxt_data
val sbrr_opt = #sbrr_opt tts_ctxt_data
val subst_thms = #subst_thms tts_ctxt_data
val mpespc_opt = #mpespc_opt tts_ctxt_data
val attrbs = #attrbs tts_ctxt_data
val tts_output_type = ETTS_Algorithm.default
val writer_in = [1, 1, 1, 1]
val in_thm = @{thm exI'[where 'a='a]}
val tts_algorithm_in : tts_algorithm_in_type =
(
ctxt,
tts_output_type,
writer_in,
rispec,
sbtspec,
sbrr_opt,
subst_thms,
mpespc_opt,
attrbs,
in_thm
)
val err_msg = mk_msg_tts_algorithm_error
"fixed type variables must not occur in the type-based theorems"
in
UT_Test_Suite.assert_exception
"fixed type variables"
tts_algorithm_in
(ERROR err_msg)
end;
(*Negative test: the input theorem must contain no fixed term variables
(the schematic ?x of exI' is instantiated with a free x below).*)
fun test_exc_fvs ctxt =
let
val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
val rispec = #rispec tts_ctxt_data
val sbtspec = #sbtspec tts_ctxt_data
val sbrr_opt = #sbrr_opt tts_ctxt_data
val subst_thms = #subst_thms tts_ctxt_data
val mpespc_opt = #mpespc_opt tts_ctxt_data
val attrbs = #attrbs tts_ctxt_data
val tts_output_type = ETTS_Algorithm.default
val writer_in = [1, 1, 1, 1]
val aT = TVar (("'a", 0), \<^sort>‹type›)
val xv = ("x", 0)
val xt = Free ("x", aT) |> Thm.cterm_of ctxt
val in_thm = Drule.instantiate_normalize ([], [((xv, aT), xt)]) @{thm exI'}
val tts_algorithm_in : tts_algorithm_in_type =
(
ctxt,
tts_output_type,
writer_in,
rispec,
sbtspec,
sbrr_opt,
subst_thms,
mpespc_opt,
attrbs,
in_thm
)
val err_msg = mk_msg_tts_algorithm_error
"fixed variables must not occur in the type-based theorems"
in
UT_Test_Suite.assert_exception
"fixed variables"
tts_algorithm_in
(ERROR err_msg)
end;
(*Negative test: the relativized stvs must form a subset of the stvs of
the input theorem (tta_assoc only mentions one stv).*)
fun test_exc_not_risstv_subset ctxt =
let
val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
val rispec = #rispec tts_ctxt_data
val sbtspec = #sbtspec tts_ctxt_data
val sbrr_opt = #sbrr_opt tts_ctxt_data
val subst_thms = #subst_thms tts_ctxt_data
val mpespc_opt = #mpespc_opt tts_ctxt_data
val attrbs = #attrbs tts_ctxt_data
val tts_output_type = ETTS_Algorithm.default
val writer_in = [1, 1, 1, 1]
val in_thm = @{thm tta_semigroup.tta_assoc}
val tts_algorithm_in : tts_algorithm_in_type =
(
ctxt,
tts_output_type,
writer_in,
rispec,
sbtspec,
sbrr_opt,
subst_thms,
mpespc_opt,
attrbs,
in_thm
)
val err_msg = mk_msg_tts_algorithm_error
"risstv must be a subset of the schematic type " ^
"variables that occur in the type-based theorems"
in
UT_Test_Suite.assert_exception
"risstv is not a subset of the stvs of the type-based theorems"
tts_algorithm_in
(ERROR err_msg)
end;
(*Negative test: the ERA must refuse to run outside of a tts context
(a fresh global context is used below).*)
fun test_not_tts_context thy =
let
val ctxt = Proof_Context.init_global thy
val tts_ctxt_data = ETTS_Context.get_tts_ctxt_data ctxt
val rispec = #rispec tts_ctxt_data
val sbtspec = #sbtspec tts_ctxt_data
val sbrr_opt = #sbrr_opt tts_ctxt_data
val subst_thms = #subst_thms tts_ctxt_data
val mpespc_opt = #mpespc_opt tts_ctxt_data
val attrbs = #attrbs tts_ctxt_data
val tts_output_type = ETTS_Algorithm.default
val writer_in = [1, 1, 1, 1]
val in_thm = @{thm tta_semigroup_hom.tta_left_ideal_closed}
val tts_algorithm_in : tts_algorithm_in_type =
(
ctxt,
tts_output_type,
writer_in,
rispec,
sbtspec,
sbrr_opt,
subst_thms,
mpespc_opt,
attrbs,
in_thm
)
val err_msg = mk_msg_tts_algorithm_error
"ERA can only be invoked from an appropriately parameterized tts context"
in
UT_Test_Suite.assert_exception
"not tts context"
tts_algorithm_in
(ERROR err_msg)
end;
(*Assembly of the test suite for the ERA: pretty-printers for the
inputs and the outputs, the wrapper around the function under test and
the attachment of all individual tests.*)
local
(*pretty-printer for the RI specification*)
fun string_of_rispec ctxt =
ML_Syntax.print_pair (Term.string_of_vname) (Syntax.string_of_term ctxt)
|> ML_Syntax.print_list;
(*pretty-printer for the sbterm specification*)
fun string_of_sbtspec ctxt =
let val string_of_term = Syntax.string_of_term ctxt
in
ML_Syntax.print_pair string_of_term string_of_term
|> ML_Syntax.print_list
end;
(*pretty-printer for the full algorithm input tuple*)
fun tts_algorithm_string_of_input (tts_algorithm_in : tts_algorithm_in_type) =
let
val
(
ctxt,
tts_output_type,
writer,
rispec,
sbtspec,
sbrr_opt,
subst_thms,
mpespc_opt,
attrbs,
thm
) = tts_algorithm_in
val ctxt_c = "ctxt: unknown context"
val tts_output_type_c =
ETTS_Algorithm.string_of_etts_output_type tts_output_type
val writer_c = string_of_writer writer
val rispec_c = rispec |> string_of_rispec ctxt |> curry op^ "rispec: "
val sbtspec_c = sbtspec |> string_of_sbtspec ctxt |> curry op^ "sbtspec: "
val sbrr_opt_c = sbrr_opt
|> ETTS_Context.string_of_sbrr_opt
|> curry op^ "sbrr_opt: "
val subst_thms_c = subst_thms
|> ETTS_Context.string_of_subst_thms
|> curry op^ "subst_thms: "
val mpespc_opt_c = mpespc_opt
|> ETTS_Context.string_of_mpespc_opt ctxt
|> curry op^ "mpespc_opt: "
val attrbs_c = attrbs |> string_of_token_src_list |> curry op^ "attrbs: "
val thm_c = thm |> Thm.string_of_thm ctxt |> curry op^ "in_thm: "
val out_c =
[
ctxt_c,
tts_output_type_c,
writer_c,
rispec_c,
sbtspec_c,
sbrr_opt_c,
subst_thms_c,
mpespc_opt_c,
attrbs_c,
thm_c
]
|> String.concatWith "\n"
in out_c end;
(*pretty-printer for the algorithm output*)
fun tts_algorithm_string_of_output
(((thm, writer), ctxt) : (thm * int list) * Proof.context) =
let
val ctxt_c = "ctxt: unknown context"
val thm_c = Thm.string_of_thm ctxt thm
val writer_c = ML_Syntax.print_list Int.toString writer
val out_c = [ctxt_c, thm_c, writer_c] |> String.concatWith "\n"
in out_c end;
(*uncurried wrapper around the function under test*)
fun tts_algorithm (tts_algorithm_in : tts_algorithm_in_type) =
let
val
(
ctxt,
tts_output_type,
writer,
rispec,
sbtspec,
sbrr_opt,
subst_thms,
mpespc_opt,
attrbs,
thm
) = tts_algorithm_in
val tts_algorithm_out =
ETTS_Algorithm.etts_algorithm
ctxt
tts_output_type
writer
rispec
sbtspec
sbrr_opt
subst_thms
mpespc_opt
attrbs
thm
in tts_algorithm_out end;
in
fun mk_test_suite_tts_algorithm ctxt =
let
val test_suite_tts_algorithm = UT_Test_Suite.init
"tts_algorithm"
tts_algorithm
tts_algorithm_string_of_input
tts_algorithm_string_of_output
in
test_suite_tts_algorithm
|> test_eq_tts_algorithm ctxt
|> test_exc_ftvs ctxt
|> test_exc_fvs ctxt
|> test_exc_not_risstv_subset ctxt
|> test_not_tts_context (Proof_Context.theory_of ctxt)
end;
end;
(*Entry point: build and run the ERA test suite in the given context.*)
fun execute_test_suite_tts_algorithm ctxt =
UT_Test_Suite.execute (mk_test_suite_tts_algorithm ctxt);
end;File ‹ETTS_TEST_TTS_REGISTER_SBTS.ML›
(*Interface of the test suite for the command tts_register_sbts:
registration returns a proof state, so the output type is Proof.state.*)
signature ETTS_TEST_TTS_REGISTER_SBTS =
sig
type tts_register_sbts_in_type
val execute_test_suite_tts_register_sbts :
Proof.context ->
(tts_register_sbts_in_type, Proof.state)
UT_Test_Suite.test_results_suite
end;
structure etts_test_tts_register_sbts : ETTS_TEST_TTS_REGISTER_SBTS =
struct
(*Prefix an error message with the name of the command tts_register_sbts.*)
fun mk_msg_tts_register_sbts_error msg = String.concat ["tts_register_sbts: ", msg]
(*Input of the registration: the sbterm (string form) with its risset,
paired with the context in which the registration is attempted.*)
type tts_register_sbts_in_type =
(string * string list) * Proof.context
(*Negative test: every fixed variable in the sbterm must be fixed in the
context (g below is not fixed in the ambient context).*)
fun test_exc_fvs ctxt =
let
val sbt = "g::'q"
val UA_c = "UA::'a set"
val UB_c = "UB::'b set"
val rissest = [UA_c, UB_c]
val tts_register_sbts_in : tts_register_sbts_in_type =
((sbt, rissest), ctxt)
val err_msg = mk_msg_tts_register_sbts_error
"all fixed variables that occur in the sbterm " ^
"must be fixed in the context"
in
UT_Test_Suite.assert_exception
"variables not fixed in the context"
tts_register_sbts_in
(ERROR err_msg)
end;
(*Negative test: the type variables of the risset must be distinct
(both entries below use 'a).*)
fun test_exc_repeated_risset ctxt =
let
val sbt = "f"
val UA_c = "UA::'a set"
val UB_c = "UA::'a set"
val rissest = [UA_c, UB_c]
val tts_register_sbts_in : tts_register_sbts_in_type =
((sbt, rissest), ctxt)
val err_msg = mk_msg_tts_register_sbts_error
"the type variables associated with the risset must be distinct"
in
UT_Test_Suite.assert_exception
"repeated risset"
tts_register_sbts_in
(ERROR err_msg)
end;
(*Assembly and execution of the registration test suite.*)
local
(*pretty-printer for the test inputs (the context is opaque)*)
fun tts_register_sbts_string_of_input
(tts_register_sbts_in : tts_register_sbts_in_type) =
let
val ((sbt, risset), _) = tts_register_sbts_in
val ctxt_c = "ctxt: unknown context"
val sbt_c = "sbt: " ^ sbt
val risset_c = "risset: " ^ ML_Syntax.print_list I risset
val out_c = [ctxt_c, sbt_c, risset_c]
|> String.concatWith "\n"
in out_c end;
(*the resulting proof state has no meaningful string representation*)
fun tts_register_sbts_string_of_output (_ : Proof.state) =
let val st_c = "st: unknown state"
in st_c end;
(*uncurried wrapper around the function under test*)
fun tts_register_sbts ((args, ctxt) : tts_register_sbts_in_type) =
ETTS_Substitution.process_tts_register_sbts args ctxt;
in
fun mk_test_suite_tts_register_sbts ctxt =
let
val test_suite_tts_register_sbts = UT_Test_Suite.init
"tts_register_sbts"
tts_register_sbts
tts_register_sbts_string_of_input
tts_register_sbts_string_of_output
in
test_suite_tts_register_sbts
|> test_exc_fvs ctxt
|> test_exc_repeated_risset ctxt
end;
end;
fun execute_test_suite_tts_register_sbts ctxt =
UT_Test_Suite.execute (mk_test_suite_tts_register_sbts ctxt);
end;Theory ETTS_Introduction
chapter‹ETTS: Reference Manual›
section‹Introduction›
theory ETTS_Introduction
imports
Manual_Prerequisites
"HOL-Library.Conditional_Parametricity"
begin
subsection‹Background›
text‹
The \textit{standard library} that is associated with the object logic
Isabelle/HOL and provided as a part of the standard distribution of Isabelle
\cite{noauthor_isabellehol_2020}
contains a significant number of formalized results from a variety of
fields of mathematics (e.g., order theory, algebra, topology).
Nevertheless, using the vocabulary that was promoted in the original article on
Types-To-Sets \cite{blanchette_types_2016}, the formalization is
performed using a type-based approach. Thus, for example, the carrier sets
associated with the algebraic structures and the underlying sets of the
topological spaces consist of all terms of an arbitrary type. This restriction
can create an inconvenience when working with mathematical objects
induced on a subset of the carrier set/underlying set associated with
the original object (e.g., see \cite{immler_smooth_2019}).
To address this limitation, several additional libraries were developed
upon the foundations provided by the standard library
(e.g., \textit{HOL-Algebra}
\cite{ballarin_isabellehol_2020} and \textit{HOL-Analysis}
\cite{noauthor_isabellehol_2020-1}).
In terms of the vocabulary associated with
Types-To-Sets, these libraries provide the set-based counterparts of many
type-based theorems in the standard library, along with a plethora of
additional results. Nonetheless, the proofs of the majority of the theorems
that are available in the standard library are restated explicitly in the
libraries that contain the set-based results. This unnecessary duplication of
efforts is one of the primary problems that the framework Types-To-Sets is
meant to address.
The framework Types-To-Sets offers a unified approach to structuring
mathematical knowledge formalized in Isabelle/HOL: every type-based theorem
can be converted to a set-based theorem in a semi-automated manner and the
relationship between such type-based and set-based theorems can be
articulated clearly and explicitly \cite{blanchette_types_2016}.
In this article, we describe a particular implementation of the framework
Types-To-Sets in Isabelle/HOL that takes the form of a further extension of
the language Isabelle/Isar with several new commands and attributes
(e.g., see \cite{wenzel_isabelle/isar_2019-1}).
›
subsection‹Previous work›
subsubsection‹Prerequisites and conventions›
text‹
A reader of this document is assumed to be familiar with
the proof assistant Isabelle, the proof language Isabelle/Isar,
the meta-logic Isabelle/Pure and
the object logic Isabelle/HOL, as described in,
\cite{paulson_natural_1986, wenzel_isabelle/isar_2019-1},
\cite{bertot_isar_1999, wenzel_isabelleisar_2007, wenzel_isabelle/isar_2019-1},
\cite{paulson_foundation_1989, wenzel_isabelle/isar_2019-1} and
\cite{yang_comprehending_2017}, respectively. Familiarity with the
content of the original articles about Types-To-Sets
\cite{blanchette_types_2016,kuncar_types_2019} and
the first large-scale application of Types-To-Sets
(as described in \cite{immler_smooth_2019})
is not essential but can be useful.
The notational conventions that are used in this document are
approximately equivalent to the conventions that were suggested in
\cite{blanchette_types_2016}, \cite{yang_comprehending_2017} and
\cite{kuncar_types_2019}; nonetheless, we try to provide
explanations whenever deemed essential in an attempt to make this body of work
self-contained. As a side note, the types of the
typed variables and constant-instances may be omitted
at will, if it is deemed that they can be inferred from the
context of the discussion.
›
subsubsection‹A note on global schematic polymorphism›
text‹
In Isabelle/Pure, a distinction is made between schematic and fixed
variables (for example, see \cite{paulson_foundation_1989} or
\cite{wenzel_isabelle/isar_2001}).
Implicitly, Isabelle/HOL also inherits this distinction.
More specifically, free variables that occur in the theorems at the top-level
of the theory context are generalized implicitly, which may be expressed by
replacing fixed variables by schematic variables
(e.g., see \cite{wenzel_isabelle/isar_2001}).
However, from the perspective of logic,
the distinction between the fixed and the schematic variables
is superficial: they are merely distinct syntactic expressions
of the same formal concept of variables
(e.g., see \cite{wenzel_isabelle/isar_2001}).
In this document, following a standard convention,
the names of the schematic variables will be prefixed with the question
mark ``$?$''. Thus, $?a$, $?b$, $\ldots$ will be used for the denotation
of schematic term variables and $?\alpha$, $?\beta$, $\ldots$ will be used
for the denotation of the schematic type variables.
Like in the previous work on Types-To-Sets, by abuse of notation,
explicit quantification over the type variables at the top level is allowed:
$\forall \alpha. \phi\left[\alpha\right]$. However,
the schematic variables will nearly always be treated
explicitly, like they are treated in the programmatic implementation
of the algorithms. It should also be noted that, apart from the
conventional use of the square brackets for the denotation of substitution,
they may be used informally to indicate that certain
types and/or terms are a part of a term, e.g.,
$t\left[?\alpha\right]$, $t\left[\sigma, c_{\tau}\right]$.
›
subsubsection‹Relativization Algorithm\label{sec:ra}›
text‹
Let ${}_{\alpha}(\beta \approx A)_{\mathsf{Rep}}^{\mathsf{Abs}}$ denote
\[
\begin{aligned}
& (\forall x_{\beta}. \mathsf{Rep}\ x \in A)\ \wedge\\
& (\forall x_{\beta}. \mathsf{Abs}\ (\mathsf{Rep}\ x) = x)\ \wedge\\
& (\forall y_{\alpha}. y \in A \longrightarrow \mathsf{Rep}\ (\mathsf{Abs}\ y) = y)
\end{aligned},
\]
let $\rightsquigarrow$ denote the constant/type dependency relation
(see subsection 2.3 in \cite{blanchette_types_2016}),
let $\rightsquigarrow^{\downarrow}$
be a substitutive closure of the constant/type dependency relation,
let $R^{+}$ denote the transitive closure of
the binary relation $R$, let $\Delta_c$ denote the set of all types for which
$c$ is overloaded and let $\sigma\not\leq S $ mean that $\sigma$ is not an
instance of any type in $S$ (see \cite{blanchette_types_2016} and
\cite{yang_comprehending_2017}).
The framework Types-To-Sets extends Isabelle/HOL with two axioms:
Local Typedef Rule (LT) and the Unoverloading Rule (UO).
The consistency of Isabelle/HOL augmented with the LT and
the UO is proved in Theorem 11 in \cite{yang_comprehending_2017}.
The LT is given by
\[
\begin{aligned}
\scalebox{1.0}{
\infer[\beta \not\in A, \phi, \Gamma]{\Gamma \vdash \phi}{%
\Gamma\vdash A \neq\emptyset
& \Gamma
\vdash
\left(
\exists \mathsf{Abs}\ \mathsf{Rep}. {}_{\sigma}(\beta\approx A)_{\mathsf{Rep}}^{\mathsf{Abs}}\longrightarrow\phi
\right)
}
}
\end{aligned}
\]
Thus, if $\beta$ is fresh for the non-empty set $A_{\sigma\ \mathsf{set}}$,
the formula $\phi$ and the context $\Gamma$, then $\phi$ can be proved in
$\Gamma$ by assuming the existence of a type $\beta$ isomorphic to $A$.
The UO is given by
\[
\infer[\text{$\forall u$ in $\phi$}. \neg(u\rightsquigarrow^{\downarrow+}c_{\sigma});\ \sigma\not\leq\Delta_c]{\forall x_{\sigma}. \phi\left[x_{\sigma}/c_{\sigma}\right]}{\phi}
\]
Thus, a constant-instance $c_{\sigma}$ may be replaced by a universally
quantified variable $x_{\sigma}$ in $\phi$, if all types and
constant-instances in $\phi$ do not semantically depend on $c_{\sigma}$
through a chain of constant and type definitions and there is no
matching definition for $c_{\sigma}$.
The statement of the \textit{original relativization algorithm} (ORA) can be
found in subsection 5.4 in \cite{blanchette_types_2016}. Here, we present
a variant of the algorithm that includes some of the amendments that were
introduced in \cite{immler_smooth_2019}, which will be referred to as the
\textit{relativization algorithm} (RA).
The differences between the ORA and
the RA are implementation-specific and have no effect on the output
of the algorithm, if applied to a conventional input.
Let $\bar{a}$ denote $a_1,\ldots,a_n$ for some positive integer $n$;
let $\Upsilon$ be a type class
\cite{nipkow_type_1991,wenzel_type_1997,altenkirch_constructive_2007}
that depends on the overloaded constants $\bar{*}$ and
let $A\downarrow\bar{f}$ be used
to state that $A$ is closed under the operations $\bar{f}$;
then the RA is given by
\[
\scalebox{0.75}{
\infer[(7)]
{
\vdash ?A_{?\alpha\ \mathsf{set}} \neq\emptyset\longrightarrow
?A\downarrow?\bar{f}\left[?\alpha\right]\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ ?A\ ?\bar{f}\longrightarrow
\phi^{\mathsf{on}}_{\mathsf{with}}\left[?\alpha,?A,?\bar{f}\right]
}
{
\infer[(6)]
{
A\neq\emptyset
\vdash A\downarrow?\bar{f}\left[\alpha\right]\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ A\ ?\bar{f}\longrightarrow
\phi^{\mathsf{on}}_{\mathsf{with}}\left[\alpha,A,?\bar{f}\right]
}
{
\infer[(5)]
{
A\neq\emptyset,{}_{\alpha}(\beta\approx A)_{\mathsf{Rep}}^{\mathsf{Abs}}
\vdash A\downarrow?\bar{f}\left[\alpha\right]\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ A\ ?\bar{f}\longrightarrow
\phi^{\mathsf{on}}_{\mathsf{with}}\left[\alpha,A,?\bar{f}\right]
}
{
\infer[(4)]
{
A\neq\emptyset,{}_{\alpha}(\beta\approx A)_{\mathsf{Rep}}^{\mathsf{Abs}}
\vdash\Upsilon_{\mathsf{with}}\ ?\bar{f}\left[\beta\right]\longrightarrow
\phi_{\mathsf{with}}\left[\beta,?\bar{f}\right]
}
{
\infer[(3)]
{
A\neq\emptyset,{}_{\alpha}(\beta\approx A)_{\mathsf{Rep}}^{\mathsf{Abs}}
\vdash\Upsilon_{\mathsf{with}}\ ?\bar{f}\left[?\alpha\right]\longrightarrow
\phi_{\mathsf{with}}\left[?\alpha,?\bar{f}\right]
}
{
\infer[(2)]
{
\vdash\Upsilon_{\mathsf{with}}\ ?\bar{f}\left[?\alpha\right]\longrightarrow
\phi_{\mathsf{with}}\left[?\alpha,?\bar{f}\right]
}
{
\infer[(1)]
{\vdash\phi_{\mathsf{with}}\left[?\alpha_{\Upsilon},\bar{*}\right]}
{\vdash\phi\left[?\alpha_{\Upsilon}\right]}
}
}
}
}
}
}
}
\]
The input to the RA
is assumed to be a theorem $\vdash\phi\left[?\alpha_{\Upsilon}\right]$
such that all of its unbound term and type variables are schematic.
Step 1 will be referred to as the first step of the dictionary
construction (it is similar to the first step of the
dictionary construction, as described in subsection 5.2 in
\cite{blanchette_types_2016});
step 2 will be described as unoverloading of the type $?\alpha_{\Upsilon}$
and includes class internalization
(see subsection 5.1 in \cite{blanchette_types_2016} and
\cite{altenkirch_constructive_2007})
and the application of the UO (step 2 corresponds to the application of the
attribute @{attribute unoverload_type} that will be
described in the next subsection); step 3 provides the assumptions
\mbox{$A_{\alpha\ \mathsf{set}}\neq\emptyset$} and
\mbox{${}_{\alpha}(\beta\approx A)_{\mathsf{Rep}}^{\mathsf{Abs}}$}
(the prerequisites for the application of the LT); step 4 is reserved
for the concrete type instantiation;
step 5 refers to the application of transfer
(see section 6 in \cite{blanchette_types_2016}); step 6 refers to the
application of the LT; step 7 refers to the export of the theorem
from the local context (e.g., see \cite{wenzel_isabelle/isar_2019}).
›
subsubsection‹Implementation of Types-To-Sets\label{subsec:ITTS}›
text‹
In \cite{blanchette_types_2016}, the authors provided the first
programmatic implementation of the framework Types-To-Sets for Isabelle/HOL
in the form of several Isabelle/ML modules
(see \cite{milner_definition_1997} and \cite{wenzel_isabelle/isar_2019}).
These modules extended the
implementation of the object logic Isabelle/HOL with the
LT and UO. Moreover, they introduced several attributes that provided a
convenience layer for the application of the ORA:
@{attribute internalize_sort}, @{attribute unoverload}
and @{attribute cancel_type_definition}.
These attributes could be used to perform steps 1, 3 and 7 (respectively) of
the ORA. Other steps could be performed using the technology that already
existed, but required a significant effort and knowledge on behalf of the users
(e.g., see \cite{immler_smooth_2019}).
The examples of the application of the ORA to theorems in
Isabelle/HOL that were developed in \cite{blanchette_types_2016}
already contained an implicit suggestion that the constants and theorems
needed for the first step of the dictionary construction in step 2 of
the ORA and the transfer rules needed for step 6 of the ORA can and should
be obtained prior to the application of the algorithm. Thus, using the notation
from subsection \ref{sec:ra},
for each constant-instance $c_{\sigma}$
that occurs in the type-based theorem
$\vdash\phi\left[?\alpha_{\Upsilon}\right]$
prior to the application of the ORA with respect to
${}_{\alpha}(\beta \approx A)_{\mathsf{Rep}}^{\mathsf{Abs}}$,
the users were expected to provide
an unoverloaded constant $c_{\mathsf{with}}$ such that
$c_{\sigma} = c_{\mathsf{with}}\ \bar{*}$, and a constant $c^{\mathsf{on}}_{\mathsf{with}}$
such that $R\left[T_{\alpha\rightarrow\beta\rightarrow\mathbb{B}}\right]
\ (c^{\mathsf{on}}_{\mathsf{with}}\ A_{\alpha\ \mathsf{set}})\ c_{\mathsf{with}}$
($\mathbb{B}$ denotes the built-in Isabelle/HOL type $bool$
\cite{kuncar_types_2015})
is a conditional transfer rule (e.g., see \cite{gonthier_lifting_2013}),
with $T$ being a binary
relation that is at least right-total and bi-unique
(see \cite{kuncar_types_2015}), assuming the default order on predicates
in Isabelle/HOL.
In \cite{immler_smooth_2019}, the implementation of the framework Types-To-Sets
was amended by providing the attribute @{attribute unoverload_type},
which subsumed the functionality of the attributes
@{attribute internalize_sort} and
@{attribute unoverload}. The RA presented above already includes this
amendment.
Potentially, the unoverloaded constants $c_{\mathsf{with}}$ and the
associated theorems $c_{\sigma} = c_{\mathsf{with}}\ \bar{*}$
can be obtained via the application of the algorithm for unoverloading
of definitions that was proposed in
\cite{kaufmann_mechanized_2010}.
However, to the best knowledge of the author, a working implementation of this
\textit{classical overloading elimination algorithm}
is not publicly available for the most recent version of Isabelle.
In \cite{immler_automation_2019}, an alternative
algorithm that serves a similar purpose is provided and
made available via the interface of the Isabelle/Isar command
@{command unoverload_definition}.
Effectively, the command applies the algorithm used
in the attribute @{attribute unoverload_type}
to a definition of the constant $c$ and uses the right-hand-side
of the resulting theorem to form a definition for $c_{\mathsf{with}}$.
Thus, technically, unlike the classical overloading elimination
algorithm, this algorithm requires the axiom UO to be available and it is
not capable of unoverloading the constants that were not overloaded
using the Isabelle's type class infrastructure. Furthermore,
the command is applicable only to the definitions provided by the user,
which could be seen as an obstacle in the automation of unoverloading of
the constants that are defined using the definitional packages other
than @{command definition} (the classical overloading elimination
algorithm relies on the definitional axioms instead of arbitrary
theorems provided by the user \cite{kaufmann_mechanized_2010}).
Of course, none of these limitations hinder the usefulness of the command,
if it is applicable.
The transfer rules for the constants that are conditionally parametric
can be synthesized automatically using the existing command
@{command parametric_constant}
\cite{gilcher_conditional_2017}
that is available from the standard distribution of Isabelle;
the framework \textit{autoref} that was developed in
\cite{lammich_automatic_2013} allows for the synthesis of transfer rules
$R\ t\ t'$, including both the transfer relation $R$ and the term $t$,
based on $t'$, under favorable conditions;
lastly, in \cite{lammich_automatic_2013} and \cite{immler_smooth_2019},
the authors suggest an outline of another feasible algorithm for the
synthesis of the transfer rules based on the functionality of the framework
\textit{transfer} \cite{gonthier_lifting_2013} of Isabelle/HOL,
but do not provide an implementation (the main algorithm presented
in \cite{lammich_automatic_2013} is independent of the standard transfer
framework of Isabelle/HOL).
Lastly, the assumption ${}_{\alpha}(\beta \approx A)_{\mathsf{Rep}}^{\mathsf{Abs}}$ can be
stated using the
constant \isa{type{\isacharunderscore}definition}
from the standard library of Isabelle/HOL as
\isa{type{\isacharunderscore}definition\ $\mathsf{Rep}$\ $\mathsf{Abs}$\ $A$};
the instantiation of types required in step 4 of the RA can
be performed using the standard attributes of Isabelle;
step 6 can be performed using the attribute
@{attribute cancel_type_definition} developed in
\cite{blanchette_types_2016}; step 7 is expected to be performed manually
by the user.
›
subsection‹Purpose and scope›
text‹
The extension of the framework Types-To-Sets that is described in this manual
adds a further layer of automation to the existing implementation
of the framework Types-To-Sets. The primary functionality of the extension
is available via the following Isar commands:
@{command tts_context}, @{command tts_lemmas} and @{command tts_lemma} (and the
synonymous commands @{command tts_corollary}, @{command tts_proposition} and
@{command tts_theorem}\footnote{In what follows, any reference to the
command @{command tts_lemma} should be viewed as a reference to the
entire family of the commands with the identical functionality.}).
The commands @{command tts_lemmas} and @{command tts_lemma}, when invoked inside
an appropriately defined @{command tts_context} provide the
functionality that is approximately equivalent to the application of all
steps of the RA and several additional steps of
pre-processing of the input and post-processing of the result
(collectively referred to as the \textit{extended relativization algorithm}
or ERA).
The extension was designed under a policy of non-intervention with the
existing implementation of the framework Types-To-Sets. Therefore, it does
not reduce the scope of the applicability of the framework.
However, the functionality that is provided by the commands associated with the
extension is a proper subset of the functionality provided by the existing
implementation. Nevertheless, the author of the extension believes that there
exist very few practical applications of the relativization algorithm that
can be solved using the original interface but cannot be solved using
the commands that are introduced within the scope of the
extension.
›
text‹\newpage›
end
Theory ETTS_Theory
section‹ETTS and ERA›
theory ETTS_Theory
imports ETTS_Introduction
begin
subsection‹Background›
text‹
In this section, we describe our implementation of the prototype software
framework ETTS that offers the integration of Types-To-Sets with the
Isabelle/Isar infrastructure and full automation of the application of
the ERA under favorable conditions.
The design of the framework rests largely on our
interpretation of several ideas expressed by the authors
of \cite{immler_smooth_2019}.
It has already been mentioned that the primary functionality of the ETTS
is available via the Isabelle/Isar commands
@{command tts_context}, @{command tts_lemmas} and @{command tts_lemma}.
There also exist secondary commands aimed at resolving certain specific
problems that one may encounter during relativization:
@{command tts_register_sbts} and @{command tts_find_sbts}.
More specifically, these commands provide means for using transfer rules
stated in a local context during the step of the ERA that is similar
to step 5 of the RA. The functionality of these commands is
explained in more detail in subsection \ref{sec:sbts} below.
It is important to note that the description of the ETTS presented
in this subsection is only a simplified model
of its programmatic implementation in Isabelle/ML.
›
subsection‹Preliminaries›
text‹
The ERA is an extension of the RA that
provides means for the automation of a design pattern similar
to the one that was proposed in \cite{immler_smooth_2019},
as well as several additional steps for pre-processing of the input
and post-processing of the result of the relativization.
In a certain restricted sense the ERA can be seen as
a localized form of the RA, as it provides additional infrastructure
aimed specifically at making the relativization of theorems stated in the
context of Isabelle's locales
\cite{kammuller_locales_1999, berardi_locales_2004, ballarin_locales_2014}
more convenient.
In what follows, assume the existence of an underlying
well-formed theory $D$ (and an associated HOL signature $\Sigma$)
that contains all definitional axioms that appear
in the standard library of Isabelle/HOL.
If \mbox{$\Gamma \vdash {}_{\alpha}(\beta \approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}$}
and
$\beta, U_{\alpha\ \mathsf{set}}, \mathsf{Rep}_{\beta\rightarrow\alpha}, \mathsf{Abs}_{\alpha\rightarrow\beta} \in \Gamma$,
then the 4-tuple
$(U_{\alpha\ \mathsf{set}}, \beta, \mathsf{Rep}_{\beta\rightarrow\alpha}, \mathsf{Abs}_{\alpha\rightarrow\beta})$,
will be referred to as a \textit{relativization isomorphism} (RI)
\textit{with respect to} $\Gamma$ (or RI, if $\Gamma$ can be inferred).
Given the RI
$(U_{\alpha\ \mathsf{set}},\beta,\mathsf{Rep}_{\beta\rightarrow\alpha},\mathsf{Abs}_{\alpha\rightarrow\beta})$,
the term $U_{\alpha\ \mathsf{set}}$ will be referred to as the
\textit{set associated with the RI}, $\beta$ will be referred to as the
\textit{type variable associated with the RI},
$\mathsf{Rep}_{\beta\rightarrow\alpha}$ will be referred to as the
\textit{representation associated with the RI}
and $\mathsf{Abs}_{\alpha\rightarrow\beta}$ will be referred
to as the \textit{abstraction associated with the RI}.
Moreover, any typed term variable $T_{\alpha\rightarrow\beta\rightarrow\mathbb{B}}$
such that $\Gamma \vdash T = (\lambda x\ y.\ \mathsf{Rep}\ y = x)$ will be referred to as the
\textit{transfer relation associated with the RI}.
The deduction
$\Gamma \vdash Domainp\ T = (\lambda x.\ x \in U)$ that holds for
this transfer relation will be referred to as the
\textit{transfer domain rule associated with the RI};
$\Gamma \vdash bi\_unique\ T$ and
$\Gamma \vdash right\_total\ T$ will be referred to as the
\textit{side conditions associated with the RI}. For brevity,
the abbreviation
$\mathsf{dbr}[T_{\alpha\rightarrow\beta\rightarrow\mathbb{B}},U_{\alpha\ \mathsf{set}}]$
will be used to mean that
$Domainp\ T = (\lambda x.\ x \in U)$, $bi\_unique\ T$
and $right\_total\ T$ for any $\alpha$, $\beta$,
$T_{\alpha\rightarrow\beta\rightarrow\mathbb{B}}$ and $U_{\alpha\ \mathsf{set}}$.
›
subsection‹Set-based terms and their registration\label{sec:sbts}›
text‹
Perhaps, one of the most challenging aspects of the automation of the
relativization process is related to the application of transfer during
step 5 of the RA: a suitable transfer rule for a given constant-instance
may exist only under non-conventional side conditions:
an important example that showcases this issue is the built-in constant
$\varepsilon$ (see \cite{kuncar_types_2019} and \cite{immler_smooth_2019}
for further information).
Unfortunately, the ETTS does not offer a fundamental solution to this problem:
the responsibility for providing suitable transfer rules for the application
of the ERA remains at the discretion of the user.
Nonetheless, the ETTS does provide
additional infrastructure that may improve the user experience when
dealing with the transfer rules that can only be conveniently stated in
an explicitly relativized local context (usually a relativized
locale): a common problem that was already explored in
\cite{immler_smooth_2019}.
The authors of \cite{immler_smooth_2019} choose to perform the relativization
of theorems that stem from their specifications in a locale context
from within another dedicated relativized locale context.
The relativized operations that are represented either by the locale parameters
of the relativized locale or remain overloaded constants associated with
a given class constraint are lifted to the type variables associated with the
RIs that are used for the application of a variant of the relativization
algorithm. This variant includes a step during which the
variables introduced during unoverloading are substituted (albeit implicitly)
for the terms that represent the lifted locale parameters and constants.
The additional infrastructure and the additional step
are needed, primarily, for the relativization of the constants
whose transfer rules can only be stated conveniently in the context
of the relativized locale.
A similar approach is used in the ETTS. However, instead of explicitly
declaring the lifted constants in advance of the application of the RA,
the user is expected to perform the registration of the so-called
\textit{set-based term} (sbterm) for each term of interest that
is a relativization of a given concept.
The inputs to the algorithm that is associated with the registration of the
sbterms are a context $\Gamma$, a term $t : \bar{\alpha}\ K$
($K$, applied using a postfix notation, contains all information about
the type constructors of the type $\bar{\alpha}\ K$) and a
sequence of $n$ distinct typed variables $\bar{U}$ with distinct types of the
form ${\alpha\ \mathsf{set}}$, such that $\bar{\alpha}$ is also of length $n$,
all free variables and free type variables that occur in
$t : \bar{\alpha}\ K$ also appear free in $\Gamma$
and $\bar{U}_i : \bar{\alpha}_i\ \mathsf{set}$ for all $i$,
$1 \leq i \leq n$.
Firstly, a term
$
\exists b.
\ R\left[\bar{A}\right]_{\bar{\alpha}\ K \rightarrow \bar{\beta}\ K\rightarrow \mathbb{B}}\ t\ b
$
is formed, such
that $R\left[\bar{A}\right]$ is a parametricity relation associated with
some type $\bar{\gamma}\ K$ for $\bar{\gamma}$ of length $n$, such that the sets
of the elements of $\bar{\alpha}$, $\bar{\beta}$ and $\bar{\gamma}$ are pairwise
disjoint, $\bar{A}$ and $\bar{\beta}$ are both of length $n$,
the elements of $\bar{A}$, $\bar{\beta}$ and $\bar{\gamma}$
are fresh for $\Gamma$ and
$\bar{A}_i : \bar{\alpha}_i\rightarrow \bar{\beta}_i\rightarrow\mathbb{B}$
for all $i$ such that $1 \leq i \leq n$. Secondly, the context $\Gamma'$ is built
incrementally starting from $\Gamma$ by adding the formulae
$\mathsf{dbr}[\bar{A}_i, \bar{U}_i]$
for each $i$ such that $1 \leq i \leq n$.
The term presented above serves as a goal that is meant to be
discharged by the user in $\Gamma'$, resulting in the deduction
\[
\Gamma \vdash
\mathsf{dbr}[?\bar{A}_i, \bar{U}_i] \longrightarrow
\exists b.
\ R\left[?\bar{A}\right]_{\bar{\alpha}\ K \rightarrow ?\bar{\beta}\ K\rightarrow \mathbb{B}}\ t\ b
\]
(the index $i$ is distributed over $n$)
after the export to the context $\Gamma$.
Once the proof is completed, the result is registered in the so-called
\textit{sbt-database} allowing a lookup of such results by the
sbterm $t$ (the terms and results are allowed to morph
when the lookup is performed from within a context different
from $\Gamma$ \cite{kauers_context_2007}).
›
subsection‹Parameterization of the ERA\label{sec:par-ERA}›
text‹
Assuming the existence of some context $\Gamma$, the ERA is parameterized by
the \textit{RI specification}, \textit{the sbterm specification},
the \textit{rewrite rules for the set-based theorem},
the \textit{known premises for the set-based theorem},
the \textit{specification of the elimination of premises
in the set-based theorem} and
the \textit{attributes for the set-based theorem}.
A sequence of the entities in the list above will be
referred to as the \textit{ERA-parameterization for} $\Gamma$.
The \textit{RI Specification} is a finite non-empty sequence
of pairs \mbox{$\left(?\gamma, U_{\alpha\ \mathsf{set}} \right)$} of
schematic type variables $?\gamma$ and the typed term variables
$U_{\alpha\ \mathsf{set}}$, such that $U_{\alpha\ \mathsf{set}} \in \Gamma$.
The individual elements of the RI specification will
be referred to as the \textit{RI specification elements}.
Given an RI specification element, any type variable that occurs
on the left hand side (LHS) of the RI specification element will be referred to as the
\textit{type variable associated with the RI specification element},
any typed term variable that occurs on the right hand side (RHS) of the RI specification
element will be referred to as the
\textit{set associated with the RI specification element}.
The type variables associated with the RI specification elements
must be distinct and the type variables of the sets associated with the
RI specification elements must be distinct.
The \textit{sbterm specification} is a finite sequence of
pairs \mbox{$(t : ?\bar{\alpha}\ K,\ u : \bar{\beta}\ K)$},
where $t$ is either a constant-instance or a
schematic typed term variable and $u$ is an sbterm with respect to $\Gamma$.
The individual elements of the sbterm specification will
be referred to as the \textit{sbterm specification elements}.
Given an sbterm specification element, any term that
occurs on the LHS of the sbterm specification element will be referred to as the
\textit{tbt associated with the sbterm specification element},
any sbterm that occurs on the RHS of the
sbterm specification element will be referred to as the
\textit{sbterm associated with the sbterm specification element}.
The \textit{rewrite rules for the set-based theorem} can be any set
of valid rules for the Isabelle simplifier \cite{wenzel_isabelle/isar_2019-1};
the \textit{known premises for the set-based theorem} can be any finite
sequence of deductions in $\Gamma$; the
\textit{specification of the elimination of premises in the set-based theorem}
is a pair $(\bar{t}, m)$, where $\bar{t}$ is a sequence of formulae and $m$
is a proof method; the \textit{attributes for the set-based theorem}
is a sequence of attributes of Isabelle (e.g., see \cite{wenzel_isabelle/isar_2019-1}).
›
subsection‹Definition of the ERA\label{sec:def-ERA}›
text‹
Assume that there exists a context $\Gamma$ and an ERA-parameterization
for $\Gamma$. A valid input to the ERA is considered to be a theorem
$\vdash\phi$ such that all variables
that occur in the theorem at the top level are schematic.
It is also assumed that there exists a (possibly empty) sequence of
schematic variables $?\bar{h}$ of length $m$ that form a subset
of the schematic variables that occur in $\phi$ and a sequence
$\bar{g}$ of sbterms in $\Gamma$ of the equivalent length, such that
$(?\bar{h}_i, \bar{g}_i)$ is an sbterm specification element of
the ERA-parameterization for all $i$ such that $1 \leq i \leq m$.
In what follows, like in the exposition of the ORA in
\cite{blanchette_types_2016} and the RA in subsection \ref{sec:ra},
for brevity it is assumed
that the set of the type variables that occur in $\phi$ is the singleton set
$\{?\alpha_{\Upsilon}\}$,
where $\Upsilon$ is a type class that depends on the sequence of
overloaded constants $\bar{*}$ of length $n$.
Thus, it is also assumed that the RI specification in the ERA-parameterization
contains exactly one RI specification element
$(?\alpha_{\Upsilon}, U_{\alpha\ \mathsf{set}})$
and that there exists a sequence of $n$ sbterms $\bar{f}$ in $\Gamma$
such that $(\bar{*}_i, \bar{f}_i)$ are sbterm specification elements
of the ERA-parameterization for all $i$ such that $1 \leq i \leq n$.
Lastly, it is assumed
that the set of all type variables of the sbterms associated with
the sbterm specification elements of the ERA-parameterization
is the singleton set $\{\alpha\}$, thence, there exist sequences $\bar{K}$
and $\bar{L}$ such that $\bar{h}_i : ?\alpha_{\Upsilon}\ \bar{K}_i$ and
$\bar{g}_i : \alpha\ \bar{K}_i$ for all $i$ such that $1 \leq i \leq m$, and
$\bar{*}_i : ?\alpha_{\Upsilon}\ \bar{L}_i$ and
$\bar{f}_i : \alpha\ \bar{L}_i$ for all $i$ such that $1 \leq i \leq n$.
The ERA can be divided in three distinct parts:
\textit{initialization of the relativization context},
\textit{kernel of the ERA} (KERA) and \textit{post-processing}.
\textbf{Initialization of the relativization context}.
Prior to the application of the relativization algorithm, the formula
$\exists \mathsf{Rep}\ \mathsf{Abs}.\ {}_{\alpha}(\beta \approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}$
is added to the context $\Gamma$, with the type variable $\beta$ being fresh
for $\Gamma$:
\mbox{$\Gamma' = \Gamma \cup \{\exists \mathsf{Rep}\ \mathsf{Abs}.\ {}_{\alpha}(\beta \approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}\}$}.
In what follows, $\Gamma'$ will be referred to as the relativization context.
Then, the properties of the Hilbert choice $\varepsilon$
are used for the definition of $\mathsf{Rep}$ and
$\mathsf{Abs}$ such that \mbox{$\Gamma' \vdash {}_{\alpha}(\beta \approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}$}
(e.g., see \cite{kuncar_types_2015}).
In this case,
\mbox{$(U_{\alpha\ \mathsf{set}},\beta,\mathsf{Rep}_{\beta\rightarrow\alpha},\mathsf{Abs}_{\alpha\rightarrow\beta})$}
is an RI with respect to $\Gamma'$.
Furthermore, a fresh $T_{\alpha\rightarrow\beta\rightarrow\mathbb{B}}$
is defined as a transfer relation associated with the RI. Finally, the
transfer domain rule associated with the RI and the side conditions associated
with the RI are proved for $T$ with respect to $\Gamma'$.
For each $\bar{g}_i$ such that \mbox{$1 \leq i \leq m$},
the sbt-database contains a deduction
\mbox{$
\Gamma \vdash\mathsf{dbr}[?A, U] \longrightarrow
\exists a.\ R\left[?A\right]_{\alpha\ \bar{K}_i \rightarrow ?\delta\ \bar{K}_i\rightarrow \mathbb{B}}\ \bar{g}_i\ a
$}.
Thence, for each $i$ such that $1 \leq i \leq m$, $?\delta$ is instantiated
as $\beta$ and $?A_{\alpha\rightarrow?\delta\rightarrow\mathbb{B}}$ is instantiated
as $T_{\alpha\rightarrow\beta\rightarrow\mathbb{B}}$. The resulting theorems
are used for the definition of a fresh sequence $\bar{a}$ such that
\mbox{$\Gamma' \vdash R\left[T_{\alpha\rightarrow\beta\rightarrow\mathbb{B}}\right]_{\alpha\ \bar{K}_i \rightarrow
\beta\ \bar{K}_i\rightarrow \mathbb{B}}\ \bar{g}_i\ \bar{a}_i$}.
Similar deductions are also established for the sequence $\bar{f}$, with the
sequence of the elements appearing on the RHS of the transfer rule denoted
by $\bar{b}$.
These deductions are meant to be used by the transfer infrastructure during the step of the ERA that
is equivalent to step 5 of the RA, as shown below.
\textbf{Kernel of the ERA}. The KERA is similar to
the RA:
\[
\scalebox{0.75}{
\infer[(7)]
{
\Gamma\vdash U \neq\emptyset\longrightarrow
U\downarrow\bar{g},\bar{f}\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ U\ \bar{f} \longrightarrow
\phi^{\mathsf{on}}_{\mathsf{with}}\left[\alpha,U,\bar{g},\bar{f}\right]
}
{
\infer[(6)]
{
\Gamma
\vdash \exists \mathsf{Rep}\ \mathsf{Abs}.{}_{\alpha}(\beta \approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}
\longrightarrow
U\downarrow\bar{g},\bar{f}\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ U\ \bar{f}\longrightarrow
\phi^{\mathsf{on}}_{\mathsf{with}}\left[\alpha,U,\bar{g},\bar{f}\right]
}
{
\infer[(5)]
{
\Gamma'
\vdash U\downarrow\bar{g},\bar{f}\longrightarrow
\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ U\ \bar{f}\longrightarrow
\phi^{\mathsf{on}}_{\mathsf{with}}\left[\alpha,U,\bar{g},\bar{f}\right]
}
{
\infer[(4)]
{
\Gamma'
\vdash\Upsilon_{\mathsf{with}}\ \bar{b}\longrightarrow
\phi_{\mathsf{with}}\left[\beta,\bar{a},\bar{b}\right]
}
{
\infer[(3)]
{
\Gamma'
\vdash\Upsilon_{\mathsf{with}}\ ?\bar{f}\left[\beta\right]\longrightarrow
\phi_{\mathsf{with}}\left[\beta,?\bar{h}\left[\beta\right],?\bar{f}\right]
}
{
\infer[(2)]
{
\Gamma'\vdash\Upsilon_{\mathsf{with}}\ ?\bar{f}\left[?\alpha\right]\longrightarrow
\phi_{\mathsf{with}}\left[?\alpha,?\bar{h}\left[?\alpha\right],?\bar{f}\right]
}
{
\infer[(1)]
{
\Gamma'\vdash\phi_{\mathsf{with}}\left[?\alpha_{\Upsilon}, ?\bar{h}\left[?\alpha_{\Upsilon}\right], \bar{*} \right]
}
{
\Gamma'\vdash\phi\left[?\alpha_{\Upsilon}, ?\bar{h}\left[?\alpha_{\Upsilon}\right]\right]
}
}
}
}
}
}
}
}
\]
Thus, step 1 will be referred to as the first step of the dictionary
construction (similar to step 1 of the RA);
step 2 will be referred to as unoverloading of the type $?\alpha_{\Upsilon}$:
it includes class internalization and the application of the UO
(similar to step 2 of the RA);
in step 3, $?\alpha$ is
instantiated as $\beta$ using the RI specification
(similar to step 4 in the RA);
in step 4, the sbterm specification is used for the instantiation
of $?\bar{h}$ as $\bar{a}$ and $?\bar{f}$ as $\bar{b}$;
step 5 refers to the application of transfer, including the
transfer rules associated with the sbterms
(similar to step 5 in the RA); in step 6, the result is exported from $\Gamma'$
to $\Gamma$, providing the additional premise
$\exists \mathsf{Rep}\ \mathsf{Abs}.\ {}_{\alpha}(\beta \approx U)_{\mathsf{Rep}}^{\mathsf{Abs}}$;
step 7 is the application of the attribute
@{attribute cancel_type_definition}
(similar to step 6 in the RA).
The RI specification and the sbterm specification provide the information
that is necessary to perform the type and term substitutions in steps
3 and 4 of the KERA. If the specifications are viewed as finite maps,
their domains morph along the transformations that the theorem
undergoes until step 4.
\textbf{Post-processing}. The deduction that is obtained in the final step of
the KERA can often be simplified further.
The following post-processing steps were created to allow for the presentation
of the set-based theorem in a format that is both desirable and convenient for
the usual applications:
\begin{enumerate}
\item \textit{Simplification}. The
rewriting is performed using the rewrite rules for the set-based theorem:
the implementation relies on the functionality of Isabelle's simplifier.
\item \textit{Substitution of known premises}. The known premises for the
set-based theorem are matched with the premises of the set-based theorem, allowing
for their elimination.
\item \textit{Elimination of premises}.
Each premise is matched against each term
in the specification of the elimination of premises in the set-based theorem;
the associated method is applied in an attempt to eliminate
the matching premises (this can be useful for the
elimination of the premises of the form $U \neq \emptyset$).
\item \textit{Application of the attributes for the set-based theorem}.
The attributes for the set-based theorem are applied as the
final step during post-processing.
\end{enumerate}
Generally, the desired form of the result after a successful application
of post-processing is similar to
\mbox{$\Gamma\vdash\phi^{\mathsf{on}}_{\mathsf{with}}\left[\alpha,U,\bar{g},\bar{f}\right]$}
with the premises \mbox{$U \neq \emptyset$, $U\downarrow\bar{g},\bar{f}$} and
\mbox{$\Upsilon^{\mathsf{on}}_{\mathsf{with}}\ U\ \bar{f}$} eliminated completely (these premises
can often be inferred from the context $\Gamma$).
›
text‹\newpage›
end
Theory ETTS_Syntax
section‹Syntax›
theory ETTS_Syntax
imports ETTS_Theory
begin
subsection‹Background›
text ‹
This section presents the syntactic categories that are associated with the
commands @{command tts_context}, @{command tts_lemmas}, @{command tts_lemma},
and several other closely related auxiliary commands.
It is important to note that the presentation of the syntax is approximate.
›
subsection‹Registration of the set-based terms›
text‹
\begin{matharray}{rcl}
@{command_def "tts_register_sbts"} & : & ‹local_theory → proof(prove)› \\
@{command_def "tts_find_sbts"} & : & ‹context →›
\end{matharray}
┉
\<^rail>‹
@@{command tts_register_sbts} term @'|' (term + @'and')
;
@@{command tts_find_sbts} (term + @'and')
›
➧ ⬚‹tts_register_sbts› ‹t› ‹|› ‹U⇩1› ⬚‹and› ‹…› ⬚‹and› ‹U⇩n› allows for the
registration of the set-based terms in the sbt-database.
Generally, ‹U⇩i› (‹1≤i≤n›) must be distinct fixed variables with distinct types
of the form \<^typ>‹'a set›, with the set of the type variables that occur in the
types of ‹U⇩i› equivalent to the set of the type variables that occur in
the type of ‹t›.
➧ ⬚‹tts_find_sbts› ‹t⇩1› ⬚‹and› ‹…› ⬚‹and› ‹t⇩n› prints the
templates for the transfer rules for the set-based terms ‹t⇩1…t⇩n›.
›
subsection‹Relativization of theorems›
text‹
\begin{matharray}{rcl}
@{command_def "tts_context"} & : & ‹theory → local_theory› \\
@{command_def "tts_lemmas"} & : & ‹local_theory → local_theory› \\
@{command_def "tts_lemma"} & : & ‹local_theory → proof(prove)› \\
@{command_def "tts_theorem"} & : & ‹local_theory → proof(prove)› \\
@{command_def "tts_corollary"} & : & ‹local_theory → proof(prove)› \\
@{command_def "tts_proposition"} & : & ‹local_theory → proof(prove)› \\
\end{matharray}
The relativization of theorems should always be performed inside an
appropriately parameterized tts context. The tts context can be set up
using the command @{command tts_context}. The framework introduces two
types of interfaces for the application of the extended relativization
algorithm: @{command tts_lemmas} and the family of the commands with
the identical functionality: @{command tts_lemma}, @{command tts_theorem},
@{command tts_corollary}, @{command tts_proposition}. Nonetheless,
the primary purpose of the command @{command tts_lemmas} is the
experimentation and the automated generation of the relativized results stated
using the command @{command tts_lemma}.
┉
\<^rail>‹
@@{command tts_context} param @'begin'
;
@@{command tts_lemmas} ((@'!' | @'?')?) tts_facts
;
(
@@{command tts_lemma} |
@@{command tts_theorem} |
@@{command tts_corollary} |
@@{command tts_proposition}
)
(tts_short_statement | tts_long_statement)
;
param: (sets var rewriting subst eliminating app)
;
sets: (@'tts' @':' ('(' type_var @'to' term ')' + @'and'))
;
var: (@'sbterms' @':' vars)?
;
vars: ('(' term @'to' term ')' + @'and')
;
rewriting: (@'rewriting' thm)?
;
subst: (@'substituting' (thm + @'and'))?
;
eliminating: (@'eliminating' elpat? @'through' method)?
;
elpat: (term + @'and')
;
app: (@'applying' attributes)?
;
tts_short_statement: short_statement tts_addendum
;
tts_long_statement: thmdecl? context tts_conclusion
;
tts_conclusion:
(
@'shows' (props tts_addendum + @'and') |
@'obtains' obtain_clauses tts_addendum
)
;
tts_addendum: (@'given' thm | @'is' thm)
;
tts_facts: @'in' (thmdef? thms + @'and')
;
›
➧ ⬚‹tts_context param begin› provides means for the specification of a
new (unnamed) tts context.
➧ @{element "tts"}~‹:›~‹(?a⇩1› ⬚‹to› ‹U⇩1)› ⬚‹and› ‹…› ⬚‹and›
‹(?a⇩n› ⬚‹to› ‹U⇩n)› provides means for the declaration of the RI specification.
For each ‹i› (‹1≤i≤n›), ‹?a⇩i› must be a schematic type variable that
occurs in each theorem provided as an input to the commands
@{command tts_lemmas} and @{command tts_lemma} invoked inside the tts context
and ‹U⇩i› can be any term of the type \<^typ>‹'a set›, where \<^typ>‹'a›
is a fixed type variable.
➧ @{element "sbterms"}~‹:›~‹(tbcv⇩1› ⬚‹to› ‹sbt⇩1)› ⬚‹and› ‹…› ⬚‹and›
‹(tbcv⇩n› ⬚‹to› ‹sbt⇩n)› can be used for the declaration of the
sbterm specification.
For each individual entry ‹i›, such that ‹1≤i≤n›, ‹tbcv⇩i› has to be either an
overloaded operation that occurs in every theorem that is provided as an input
to the extended relativization algorithm or a schematic variable that occurs in
every theorem that is provided as an input to the command, and ‹sbt⇩i› has to be
a term registered in the sbt-database.
➧ @{element "rewriting"} ‹thm› provides means for the declaration
of the rewrite rules for the set-based theorem.
➧ @{element "substituting"} ‹thm⇩1› ⬚‹and› ‹…› ⬚‹and› ‹thm⇩n› provides
means for the declaration of the known premises for the set-based theorem.
➧ @{element "eliminating"} ‹term⇩1› ⬚‹and› ‹…› ⬚‹and› ‹term⇩n›
@{element "through"} ‹method› provides means for the declaration of
the specification of the elimination of premises in the set-based theorem.
➧ @{element "applying"} ‹[attr⇩1, …, attr⇩n]› provides means for
the declaration of the attributes for the set-based theorem.
➧ ⬚‹tts_lemmas› applies the ERA to a list
of facts and saves the resulting set-based facts in the context.
The command @{command tts_lemmas} should always be invoked from within a
tts context. If the statement of the command is followed immediately by the
optional keyword @{element "!"}, then it operates in the verbose mode,
printing the output of the application of the individual steps of the
programmatic implementation of the algorithm. If the statement of the command
is followed immediately by the optional keyword @{element "?"}, then
the command operates in the active mode, outputting the set-based facts
in the form of the ``active areas'' that can be embedded in the Isabelle
theory file inside the tts context from which the command @{command tts_lemmas}
was invoked. There is a further minor difference between the active mode
and the other two modes of operation that is elaborated upon within the
description of the keyword @{element "in"} below.
➧ @{element "in"} ‹sbf⇩1 = tbf⇩1› ⬚‹and› ‹…› ⬚‹and› ‹sbf⇩n = tbf⇩n› is used for
the specification of the type-based theorems and the output of the command.
For each individual entry ‹i›, such that ‹1≤i≤n›, ‹tbf⇩i› is used for
the specification of the input of the extended relativization algorithm and
‹sbf⇩i› is used for the specification of the name binding for the output of
the extended relativization algorithm.
The specification of the output is optional: if ‹sbf⇩i› is omitted, then a
default specification of the output is inferred automatically. ‹tbf⇩i› must
be a schematic fact available in the context, whereas ‹sbf⇩i› can be any
fresh name binding. Optionally, it is possible to provide attributes for
each individual input and output, e.g., ‹sbf⇩i[sbf_attrb] = tbf⇩i[tbf_attrb]›.
In this case, the list of the attributes ‹tbf_attrb› is applied to ‹tbf⇩i›
during the first part (initialization of the relativization context)
of the ERA. If the command operates in the active
mode, then the attributes ‹sbf_attrb› are included in the active area output,
but not added to the list of the set-based attributes.
For other modes of operation, the attributes ‹sbf_attrb› are added to the list
of the set-based attributes and applied during the third part (post-processing)
of the ERA.
➧ ⬚‹tts_lemma›~‹a: φ› @{syntax "tts_addendum"}, enters proof mode with
the main goal formed by an application of a tactic that depends on the
settings specified in @{syntax "tts_addendum"} to ‹φ›. Eventually, this results
in some fact ‹⊢φ› to be put back into the target context. The command
should always be invoked from within a tts context.
➧ A @{syntax tts_long_statement} is similar to the standard
@{syntax long_statement} in that it allows to build up an initial proof
context for the subsequent claim incrementally. Similarly,
@{syntax tts_short_statement} can be viewed as a natural extension of the
standard @{syntax short_statement}.
➧ @{syntax "tts_addendum"} is used for the specification of the
pre-processing strategy of the goal ‹φ›. \mbox{‹φ› ⬚‹is› ‹thm›} applies the
extended relativization algorithm to ‹thm›. If the term that is associated
with the resulting set-based theorem is ‹α›-equivalent to the term associated
with the goal ‹φ›, then a specialized tactic solves the main goal, leaving
only a trivial goal in its place (the trivial goal can be solved using the
terminal proof \mbox{step \textbf{.}}). \mbox{‹φ› ⬚‹given› ‹thm›} also applies the
extended relativization algorithm to ‹thm›, but the resulting set-based theorem
is merely added as a premise to the goal ‹φ›.
›
text‹\newpage›
endTheory ETTS_Examples
section‹ETTS by example›
theory ETTS_Examples
imports
ETTS_Syntax
Complex_Main
begin
subsection‹Background›
text‹
In this section, some of the capabilities of the extension of the framework
Types-To-Sets are demonstrated by example. The examples that are presented
in this section are expected to be sufficient to begin an independent
exploration of the extension, but do not cover the entire spectrum of its
functionality.
›
subsection‹First steps›
subsubsection‹Problem statement›
text‹
Consider the task of the relativization of the type-based theorem
@{thm [source] topological_space_class.closed_Un} from the standard library
of Isabelle/HOL:
\begin{center}
@{thm [names_short = true] topological_space_class.closed_Un[no_vars]},
\end{center}
where ‹S::'a::topological_space set› and ‹T::'a::topological_space set›.
›
subsubsection‹Unoverloading›
text‹
The constant @{const closed} that occurs in the theorem is an overloaded
constant defined as \mbox{@{thm [names_short = true] closed_def[no_vars]}}
for any @{term [show_sorts] "S::'a::topological_space set"}.
The constant may be unoverloaded with
the help of the command @{command ud} that is provided as part of
the framework ``Conditional Transfer Rule'' (CTR):
›
(* Unoverload ‹closed› with the UD framework: the first invocation treats the
locale-qualified constant, the second the overloaded top-level constant
(declaring ‹closed.with› and the theorem ‹closed.with›, per the text below). *)
ud ‹topological_space.closed›
ud closed' ‹closed›
text‹
This invocation declares the constant @{const closed.with} that is defined as
\begin{center}
@{thm closed.with_def[no_vars]}
\end{center}
and provides the theorem @{thm [source] closed.with}
\begin{center}
@{thm closed.with[no_vars]}
\end{center}
that establishes the relationship between the unoverloaded constant
and the overloaded constant. The theorem @{thm [source] closed.with}
is automatically added to the dynamic fact @{thm [source] ud_with}.
›
subsubsection‹Conditional transfer rules›
text‹
Before the relativization can be performed, the transfer rules need to be
available for each constant that occurs in the type-based theorem
immediately after step 4 of the KERA.
All binary relations that are used in the transfer rules must be
at least right total and bi-unique (assuming the default order on predicates in
Isabelle/HOL). For the theorem
@{thm [source] topological_space_class.closed_Un}, there are two such constants:
@{const class.topological_space} and @{const closed.with}.
The transfer rules can be obtained with the help of the command @{command ctr}
from the framework CTR. The process may involve
the synthesis of further ``relativized'' constants, as described in the
reference manual for the framework CTR.
›
(* Synthesize the transfer rules (and relativized constants
topological_space_ow and closed_ow) for class.topological_space and
closed.with, under a right-total bi-unique relation A with domain U. *)
ctr
relativization
synthesis ctr_simps
assumes [transfer_domain_rule]: "Domainp A = (λx. x ∈ U)"
and [transfer_rule]: "right_total A" "bi_unique A"
trp (?'a A)
in topological_space_ow: class.topological_space_def
and closed_ow: closed.with_def
subsubsection‹Relativization›
text‹
As mentioned previously, the relativization of theorems can only
be performed from within a suitable tts context. In setting up the tts context,
the users always need to provide the RI specification elements that are
compatible with the theorems that are meant to be relativized in the
tts context. The set of the schematic type variables that occur in the theorem
@{thm [source] topological_space_class.closed_Un} is ‹{›‹?'a›‹}›.
Thus, there needs to be exactly one RI specification element of the form
(‹?'a›, @{term [show_types] "U::'a set"}):
›
(* tts context with the single RI specification element ?'a ↝ U. *)
tts_context
tts: (?'a to ‹U::'a set›)
begin
text‹
The relativization can be performed by invoking the command
@{command tts_lemmas} in the following manner:
›
tts_lemmas? in closed_Un' = topological_space_class.closed_Un
text‹
In this case, the command was invoked in the active mode, providing
an active area that can be used to insert the following theorem directly
into the theory file:
›
(* Raw relativization of topological_space_class.closed_Un, as produced by the
active-mode invocation of tts_lemmas above (no post-processing parameters). *)
tts_lemma closed_Un':
assumes "U ≠ {}"
and "∀x∈S. x ∈ U"
and "∀x∈T. x ∈ U"
and "topological_space_ow U opena"
and "closed_ow U opena S"
and "closed_ow U opena T"
shows "closed_ow U opena (S ∪ T)"
is topological_space_class.closed_Un.
text‹
The invocation of the command @{command tts_lemmas} in the
active mode can be removed with no effect on the theorems that
were generated using the command.
›
end
text‹
While our goal was achieved, that is, the theorem
@{thm [source] closed_Un'} is, indeed, a relativization
of the theorem @{thm [source] topological_space_class.closed_Un},
something does not appear right. Is the assumption ‹U ≠ {}› necessary?
Is it possible to simplify ‹∀x∈S. x ∈ U›? Is it necessary to
use such a contrived name for the denotation of an open set?
Of course, all of these
issues can be resolved by restating the theorem in the form that we would like
to see and using @{thm [source] closed_Un'} in the proof of this theorem,
e.g.
›
(* Manual restatement of closed_Un' in the preferred form: subset premises
instead of bounded quantifiers, no ‹U ≠ {}› premise, and τ instead of opena;
proved from closed_Un' by case analysis on ‹U = {}›. *)
lemma closed_Un'':
assumes "S ⊆ U"
and "T ⊆ U"
and "topological_space_ow U τ"
and "closed_ow U τ S"
and "closed_ow U τ T"
shows "closed_ow U τ (S ∪ T)"
using assms
unfolding topological_space_ow_def
by (cases ‹U = {}›) (auto simp: assms(3) closed_Un' subset_iff)
text‹
However, having to restate the theorem presents a grave inconvenience.
This can be avoided by using a different format of the @{syntax tts_addendum}:
›
(* Same result via the ‹given› form of tts_addendum: the relativized theorem is
added as a premise (prems) of the goal instead of closing it outright. *)
tts_context
tts: (?'a to ‹U::'a set›)
begin
tts_lemma closed_Un''':
assumes "S ⊆ U"
and "T ⊆ U"
and "topological_space_ow U τ"
and "closed_ow U τ S"
and "closed_ow U τ T"
shows "closed_ow U τ (S ∪ T)"
given topological_space_class.closed_Un
(* Non-empty case: discharge the goal with the raw relativized rule prems. *)
proof(cases ‹U = {}›)
case False assume prems[OF False]:
"⟦
U ≠ {};
∀x∈S. x ∈ U;
∀x∈T. x ∈ U;
topological_space_ow U τ;
closed_ow U τ S;
closed_ow U τ T
⟧ ⟹ closed_ow U τ (S ∪ T)"
for U :: "'a set" and S T τ
from prems show ?thesis using assms by blast
qed simp
end
text‹
Nevertheless, could there still be some space for improvement?
It turns out that instead of having to state
the theorem in the desired form manually, often enough, it suffices
to provide additional parameters for post-processing
of the raw set-based theorem, as demonstrated in the code below:
›
(* Same result obtained fully automatically by parameterizing post-processing:
rewriting with ctr_simps, eliminating the ‹?U ≠ {}› premise, and instantiating
the free variable via the attribute ‹of›. *)
tts_context
tts: (?'a to ‹U::'a set›)
rewriting ctr_simps
eliminating ‹?U≠{}› through (auto simp: topological_space_ow_def)
applying[of _ _ _ τ]
begin
tts_lemma closed_Un'''':
assumes "S ⊆ U"
and "T ⊆ U"
and "topological_space_ow U τ"
and "closed_ow U τ S"
and "closed_ow U τ T"
shows "closed_ow U τ (S ∪ T)"
is topological_space_class.closed_Un.
end
text‹
Finding the most suitable set of parameters for post-processing of the
result of the relativization is an iterative process and requires practice
before fluency can be achieved.
›
text‹\newpage›
endTheory Introduction
chapter‹ETTS Case Studies: Introduction›
theory Introduction
imports Types_To_Sets_Extension.ETTS_Auxiliary
begin
section‹Background›
subsection‹Purpose›
text‹
The remainder of this document presents several examples of the application
of the extension of the framework Types-To-Sets and provides the potential
users of the extension with a plethora of design
patterns to choose from for their own applied relativization needs.
›
subsection‹Related work›
text‹
Since the publication of the framework Types-To-Sets in
\cite{blanchette_types_2016}, there has been a growing interest
in its use in applied formalization. Some
of the examples of the application of the framework include
\cite{divason_perron-frobenius_2016},
\cite{maletzky_hilberts_2019} and \cite{immler_smooth_2019}. However, this
list is not exhaustive. Arguably, the most significant application example
was developed in \cite{immler_smooth_2019}, where Fabian Immler and
Bohua Zhan performed the
relativization of over 200 theorems from the standard mathematics library
of Isabelle/HOL.
Nonetheless, it is likely that the work presented in this document
is the first significant application of the ETTS:
unsurprisingly, the content of this document was developed
in parallel with the extension of the framework itself. Also, perhaps, it is
the largest application of the framework Types-To-Sets up to this date:
only one of the three libraries (SML Relativization) presented in the
context of this work contains the relativization of over 800 theorems
from the standard library of Isabelle/HOL.
›
section‹Examples: overview›
subsection‹Background›
text‹
The examples that are presented in this document were developed for the
demonstration of the impact of various aspects of the relativization process
on the outcome of the relativization.
Three libraries of relativized results were developed in the context
of this work:
\begin{itemize}
\item \textit{SML Relativization}: a relativization
of elements of the standard mathematics library of Isabelle/HOL
\item \textit{TTS Vector Spaces}: a renovation of the set-based
library that was developed in \cite{immler_smooth_2019} using the ETTS
instead of the existing interface for Types-To-Sets
\item \textit{TTS Foundations}: a relativization of a miniature type-based
library with every constant being parametric under the side
conditions compatible with Types-To-Sets
\end{itemize}
›
subsection‹SML Relativization›
text‹
The standard library that is associated with the
object logic Isabelle/HOL and provided as a part of the
standard distribution of Isabelle \cite{noauthor_isabellehol_2020}
contains a significant number of formalized results from a variety of
fields of mathematics. However, the formalization is performed using a
type-based approach: for example, the carrier sets associated with the
algebraic structures and the underlying sets of the topological spaces
consist of all terms of an arbitrary type.
The ETTS was applied to the relativization of a certain number of results from
the standard library.
The results that are formalized in the library
SML Relativization are taken from an array of topics that include
order theory, group theory, ring theory and topology.
However, only the
results whose relativization could be nearly fully automated using
the frameworks UD, CTR and ETTS with almost no additional proof effort
are included.
›
subsection‹TTS Vector Spaces›
text‹
The TTS Vector Spaces is a remake of the library of relativized results that
was developed in \cite{immler_smooth_2019} using the ETTS.
The theorems that are provided in the library TTS Vector Spaces are nearly
identical to the results that are provided in \cite{immler_smooth_2019}.
A detailed description of the original library has already
been given in \cite{immler_smooth_2019} and will not be restated.
The definitional frameworks that are used in \cite{immler_smooth_2019}
and the TTS Vector Spaces are similar. While the unoverloading
of most of the constants could be performed by using the
command @{command ud}, the command @{command ctr} could not
be used to establish that the unoverloaded constants are
parametric under a suitable set of side conditions. Therefore,
like in \cite{immler_smooth_2019}, the proofs of the transfer rules were
performed manually. However, the advantages
of using the ETTS become apparent during the relativization of
theorems: the complex infrastructure that was needed for
compiling out dependencies on overloaded constants, the manual invocation of the
attributes related to the individual steps of the relativization algorithm,
the repeated explicit references to the theorem as it undergoes the
transformations associated with the individual steps of
the relativization algorithm, the explicitly stated names of the set-based
theorems were no longer needed. Furthermore, the theorems synthesized by the
ETTS in TTS Vector Spaces appear in the formal proof documents in a format
that is similar to the canonical format of the Isabelle/Isar declarations
associated with the standard commands such as @{command lemma}.
›
subsection‹TTS Foundations›
text‹
The most challenging aspect of the relativization
process, perhaps, is related to the availability of the transfer rules for the
constants in the type-based theorems. Nonetheless, even if the transfer
rules are available, having to use the relativized constants in the set-based
theorems that are different from the original constants that are used in the
type-based theorems can be seen as unnatural and inconvenient.
Unfortunately, the library SML Relativization suffers from both
of the aforementioned problems. The library that was
developed in \cite{immler_smooth_2019}
(hence, also the library TTS Vector Spaces)
suffers, primarily, from the former problem, but, arguably, due to the methodology
that was chosen for the relativization, the library has a more restricted scope
of applicability.
The library TTS Foundations provides an example of a miniature
type-based library such that all constants associated with the operations on
mathematical structures (effectively, this excludes the
constants associated with the locale predicates)
in the library are parametric under the side conditions
compatible with Types-To-Sets. The relativization is
performed with respect to all possible type variables; in this case,
the type classes are not used in the type-based library. Currently,
the library includes the results from the areas of order theory and
semigroups. However, it is hoped that it can be seen
that the library can be easily extended to include most of the
content that is available in the main library of Isabelle/HOL.
The library TTS Foundations demonstrates that the development of a
set-based library can be nearly fully automated using the
existing infrastructure associated with the UD, CTR and ETTS,
and requires almost no explicit proofs on
behalf of the users of these frameworks.›
endTheory SML_Introduction
chapter‹SML Relativization›
theory SML_Introduction
imports "../Introduction"
begin
end
Theory Set_Ext
section‹Extension of the theory \<^text>‹Set››
theory Set_Ext
imports Main
begin
(* A two-parameter set comprehension rewritten with an explicit equality. *)
lemma set_comp_pair: "{f t r |t r. P t r} = {x. ∃t r. P t r ∧ x = (f t r)}"
by auto
lemma image_iff': "(∀x∈A. f x ∈ B) = (f ` A ⊆ B)" by auto
text‹\newpage›
endTheory Lifting_Set_Ext
section‹Extension of the theory \<^text>‹Lifting_Set››
theory Lifting_Set_Ext
imports Main
begin
context
includes lifting_syntax
begin
(* Transfer rule: for a right-total A, equality of two 'a-set predicates
restricted to subsets of Collect (Domainp A) corresponds to plain equality
of the related 'b-set predicates. *)
lemma set_pred_eq_transfer[transfer_rule]:
assumes "right_total A"
shows
"((rel_set A ===> (=)) ===> (rel_set A ===> (=)) ===> (=))
(λX Y. ∀s⊆Collect (Domainp A). X s = Y s)
((=)::['b set ⇒ bool, 'b set ⇒ bool] ⇒ bool)"
proof(intro rel_funI)
let ?sA = "Collect (Domainp A)"
fix X Y :: "'a set ⇒ bool"
fix X' Y' :: "'b set ⇒ bool"
assume rs: "(rel_set A ===> (=)) X X'" "(rel_set A ===> (=)) Y Y'"
show "(∀s⊆?sA. X s = Y s) = (X' = Y')"
proof
assume X_eq_Y: "∀s⊆?sA. X s = Y s"
(* X' s' ⟹ Y' s': pull s' back along A (right-totality of rel_set A),
note the preimage lies inside the domain, and transport via X_eq_Y. *)
{
fix s' assume "X' s'"
then obtain s where "rel_set A s s'"
by (meson assms right_total_def right_total_rel_set)
then have "X s" using rs(1) unfolding rel_fun_def by (simp add: ‹X' s'›)
moreover from ‹rel_set A s s'› have "s ⊆ ?sA"
unfolding Ball_Collect[symmetric] by (auto dest: rel_setD1)
ultimately have "Y' s'"
using rs(2)[unfolded rel_fun_def] ‹rel_set A s s'› by (simp add: X_eq_Y)
}
note XY = this
(* Symmetric direction: Y' s' ⟹ X' s'. *)
{
fix s' assume "Y' s'"
then obtain s where "rel_set A s s'"
by (meson assms right_total_def right_total_rel_set)
then have "Y s" using rs(2)[unfolded rel_fun_def] by (simp add: ‹Y' s'›)
moreover from ‹rel_set A s s'› have "s ⊆ ?sA"
unfolding Ball_Collect[symmetric] by (auto dest: rel_setD1)
ultimately have "X' s'"
using X_eq_Y rs(1)[unfolded rel_fun_def] ‹rel_set A s s'› by auto
}
with XY show "X' = Y'" by auto
next
(* Converse: if X' = Y' then X and Y agree on every subset of the domain. *)
assume "X' = Y'" show "∀s⊆?sA. X s = Y s"
unfolding Ball_Collect[symmetric]
using rs[unfolded rel_fun_def] ‹X' = Y'› by (metis DomainpE Domainp_set)+
qed
qed
(* Helper: the product-domain predicate on pairs equals membership in the
Cartesian product of the two domain sets; used to finish the vimage rules. *)
private lemma vimage_fst_transfer_h:
"
pred_prod (Domainp A) (Domainp B) x =
(x ∈ Collect (Domainp A) × Collect (Domainp B))
"
unfolding pred_prod_beta mem_Times_iff by simp
(* Transfer rule for vimage of a pair-consuming function whose result is
related by A: the left-hand side restricts the preimage to the product of
the domains of A and B. Proof: run transfer_prover stepwise, then close the
residual goal via the helper above. *)
lemma vimage_fst_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "right_total A" "right_total B"
shows
"((rel_prod A B ===> A) ===> rel_set A ===> rel_set (rel_prod A B))
(λf S. (f -` S) ∩ ((Collect (Domainp A)) × (Collect (Domainp B))))
vimage"
unfolding vimage_def
apply transfer_prover_start
apply transfer_step+
unfolding vimage_fst_transfer_h
by auto
(* Companion rule with the result related by B; the final rewrite reuses
vimage_fst_transfer_h, which is symmetric in its statement. *)
lemma vimage_snd_transfer[transfer_rule]:
assumes [transfer_rule]: "right_total A" "bi_unique B" "right_total B"
shows
"((rel_prod A B ===> B) ===> rel_set B ===> rel_set (rel_prod A B))
(λf S. (f -` S) ∩ ((Collect (Domainp A)) × (Collect (Domainp B))))
vimage"
unfolding vimage_def
apply transfer_prover_start
apply transfer_step+
unfolding vimage_fst_transfer_h by auto
(* Transfer rule for plain vimage: the left-hand side restricts the preimage
to the domain of A; discharged entirely by transfer_prover. *)
lemma vimage_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique B" "right_total A"
shows
"((A ===> B) ===> (rel_set B) ===> rel_set A)
(λf s. (vimage f s) ∩ (Collect (Domainp A))) (-`)"
by transfer_prover
(* pairwise is parametric under a bi-unique relation. *)
lemma pairwise_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A"
shows "((A ===> A ===> (=)) ===> rel_set A ===> (=)) pairwise pairwise"
unfolding pairwise_def by transfer_prover
(* disjnt is parametric under a bi-unique relation. *)
lemma disjnt_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A"
shows "(rel_set A ===> rel_set A ===> (=)) disjnt disjnt"
unfolding disjnt_def by transfer_prover
(* bij_betw is parametric when both relations are bi-unique; the stepwise
transfer leaves a residual goal closed by simp. *)
lemma bij_betw_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "bi_unique B"
shows "((A ===> B) ===> rel_set A ===> rel_set B ===> (=)) bij_betw bij_betw"
unfolding bij_betw_def
apply transfer_prover_start
apply transfer_step+
by simp
end
text‹\newpage›
endTheory Product_Type_Ext
section‹Extension of the theory \<^text>‹Product_Type››
theory Product_Type_Ext
imports Main
begin
context
includes lifting_syntax
begin
(* Sigma (dependent product of sets) is parametric for right-total A. *)
lemma Sigma_transfer[transfer_rule]:
assumes [transfer_rule]: "right_total A"
shows
"(rel_set A ===> (A ===> rel_set B) ===> rel_set (rel_prod A B))
Sigma Sigma"
unfolding Sigma_def by transfer_prover
end
text‹\newpage›
endTheory Transfer_Ext
section‹Extension of the theory \<^text>‹Transfer››
theory Transfer_Ext
imports Main
begin
(* Under a bi-unique T, the right-hand counterpart of a ∩ b is determined
uniquely: any xr related to a ∩ b equals a' ∩ b'. *)
lemma bi_unique_intersect_r:
assumes "bi_unique T"
and "rel_set T a a'"
and "rel_set T b b'"
and "rel_set T (a ∩ b) xr"
shows "a' ∩ b' = xr"
proof -
(* Every element of a ∩ b has a T-image in a' ∩ b' (right-uniqueness
merges the images obtained from the two premises). *)
{
fix x assume "x ∈ a ∩ b"
then have "x ∈ a" and "x ∈ b" by simp+
from assms(2) ‹x ∈ a› have "∃y ∈ a'. T x y" by (rule rel_setD1)
moreover from assms(3) ‹x ∈ b› have "∃y ∈ b'. T x y" by (rule rel_setD1)
ultimately have "∃y ∈ a' ∩ b'. T x y"
using assms(1) by (auto dest: bi_uniqueDr)
}
note unique_r = this
(* Symmetric: every element of a' ∩ b' has a T-preimage in a ∩ b. *)
{
fix x assume "x ∈ a' ∩ b'"
then have "x ∈ a'" and "x ∈ b'" by simp+
from assms(2) ‹x ∈ a'› have "∃y ∈ a. T y x" by (rule rel_setD2)
moreover from assms(3) ‹x ∈ b'› have "∃y ∈ b. T y x" by (rule rel_setD2)
ultimately have "∃y ∈ a ∩ b. T y x"
using assms(1) by (auto dest: bi_uniqueDl)
}
(* Combine into rel_set T (a ∩ b) (a' ∩ b'); bi-uniqueness of rel_set T
then forces xr = a' ∩ b'. *)
with unique_r have "rel_set T (a ∩ b) (a' ∩ b')" using rel_setI by blast
with assms(1,4) show ?thesis by (auto dest: bi_uniqueDr bi_unique_rel_set)
qed
(* Left-hand counterpart of bi_unique_intersect_r, obtained by flipping the
relation T and reusing that lemma. *)
lemma bi_unique_intersect_l:
assumes "bi_unique T"
and "rel_set T a a'"
and "rel_set T b b'"
and "rel_set T xl (a' ∩ b')"
shows "a ∩ b = xl"
proof -
let ?T' = "λ y x. T x y"
from assms(1) have "bi_unique ?T'" unfolding bi_unique_def by simp
moreover from assms(2) have "rel_set ?T' a' a" unfolding rel_set_def by simp
moreover from assms(3) have "rel_set ?T' b' b" unfolding rel_set_def by simp
moreover from assms(4) have "rel_set ?T' (a' ∩ b') xl"
unfolding rel_set_def by simp
ultimately show ?thesis by (rule bi_unique_intersect_r)
qed
(* rel_set T distributes over intersection when T is bi-unique. *)
lemma bi_unique_intersect:
assumes "bi_unique T" and "rel_set T a a'" and "rel_set T b b'"
shows "rel_set T (a ∩ b) (a' ∩ b')"
proof -
(* Forward inclusion: each xl ∈ a ∩ b has a T-image inside a' ∩ b'
(right-uniqueness makes the images from a and b coincide). *)
{
fix xl assume "xl ∈ a ∩ b"
then have "xl ∈ a" and "xl ∈ b" by simp+
with assms(3) obtain xr where "T xl xr" unfolding rel_set_def by auto
with assms(1,2) ‹xl ∈ a› have "xr ∈ a'"
by (auto dest: bi_uniqueDr rel_setD1)
moreover with assms(1,3) ‹xl ∈ b› ‹T xl xr› have "xr ∈ b'"
by (auto dest: bi_uniqueDr rel_setD1)
ultimately have "xr ∈ a' ∩ b'" by simp
with ‹T xl xr› have "∃xr. xr ∈ a' ∩ b' ∧ T xl xr" by auto
}
then have prem_lhs: "∀xl ∈ a ∩ b. ∃xr. xr ∈ a' ∩ b' ∧ T xl xr" by simp
(* Backward inclusion: each xr ∈ a' ∩ b' has a T-preimage in a ∩ b. *)
{
fix xr
assume "xr ∈ a' ∩ b'"
then have "xr ∈ a'" and "xr ∈ b'" by simp+
with assms(3) obtain xl where "T xl xr" unfolding rel_set_def by auto
with assms(1,2) ‹xr ∈ a'› have "xl ∈ a"
by (auto dest: bi_uniqueDl rel_setD2)
moreover with assms(1,3) ‹xr ∈ b'› ‹T xl xr› have "xl ∈ b"
by (auto dest: bi_uniqueDl rel_setD2)
ultimately have "xl ∈ a ∩ b" by simp
with ‹T xl xr› have "∃xl. xl ∈ a ∩ b ∧ T xl xr" by auto
}
then have prem_rhs: "∀xr ∈ a' ∩ b'. ∃xl. xl ∈ a ∩ b ∧ T xl xr" by simp
from prem_lhs prem_rhs show ?thesis unfolding rel_set_def by auto
qed
(* Union analogue of bi_unique_intersect_r: under bi-unique T, the right-hand
counterpart of a ∪ b is uniquely a' ∪ b'. *)
lemma bi_unique_union_r:
assumes "bi_unique T"
and "rel_set T a a'"
and "rel_set T b b'"
and "rel_set T (a ∪ b) xr"
shows "a' ∪ b' = xr"
proof -
(* Every element of a ∪ b has a T-image in a' ∪ b'. *)
{
fix x assume "x ∈ a ∪ b"
then have "x ∈ a ∨ x ∈ b" by simp
from assms(2) have "∃y ∈ a'. T x y" if "x ∈ a"
using that by (rule rel_setD1)
moreover from assms(3) have "∃y ∈ b'. T x y" if "x ∈ b"
using that by (rule rel_setD1)
ultimately have "∃y ∈ a' ∪ b'. T x y" using ‹x ∈ a ∨ x ∈ b› by auto
}
note unique_r = this
(* Every element of a' ∪ b' has a T-preimage in a ∪ b. *)
{
fix x assume "x ∈ a' ∪ b'"
then have "x ∈ a' ∨ x ∈ b'" by simp
from assms(2) have "∃y ∈ a. T y x" if "x ∈ a'"
using that by (rule rel_setD2)
moreover from assms(3) have "∃y ∈ b. T y x" if "x ∈ b'"
using that by (rule rel_setD2)
ultimately have "∃y ∈ a ∪ b. T y x" using ‹x ∈ a' ∨ x ∈ b'› by auto
}
(* rel_set T (a ∪ b) (a' ∪ b') plus bi-uniqueness pins down xr. *)
with unique_r have "rel_set T (a ∪ b) (a' ∪ b')" by (auto intro: rel_setI)
with assms(1,4) show ?thesis by (auto dest: bi_uniqueDr bi_unique_rel_set)
qed
(* Left-hand counterpart of bi_unique_union_r via the flipped relation. *)
lemma bi_unique_union_l:
assumes "bi_unique T"
and "rel_set T a a'"
and "rel_set T b b'"
and "rel_set T xl (a' ∪ b')"
shows "a ∪ b = xl"
proof -
let ?T' = "λy x. T x y"
from assms(1) have "bi_unique ?T'" unfolding bi_unique_def by simp
moreover from assms(2) have "rel_set ?T' a' a" unfolding rel_set_def by simp
moreover from assms(3) have "rel_set ?T' b' b" unfolding rel_set_def by simp
moreover from assms(4) have "rel_set ?T' (a' ∪ b') xl"
unfolding rel_set_def by simp
ultimately show ?thesis by (rule bi_unique_union_r)
qed
(* rel_set T distributes over union when T is bi-unique. *)
lemma bi_unique_union:
assumes "bi_unique T" and "rel_set T a a'" and "rel_set T b b'"
shows "rel_set T (a ∪ b) (a' ∪ b')"
proof -
(* Forward inclusion. *)
{
fix xl assume "xl ∈ a ∪ b"
with assms(2,3) obtain xr where "T xl xr" unfolding rel_set_def by auto
with assms ‹xl ∈ a ∪ b› have "xr ∈ a' ∪ b'"
unfolding bi_unique_def using Un_iff by (metis Un_iff rel_setD1)
with ‹T xl xr› have "∃xr. xr ∈ a' ∪ b' ∧ T xl xr" by auto
}
then have prem_lhs: "∀xl ∈ a ∪ b. ∃xr. xr ∈ a' ∪ b' ∧ T xl xr" by simp
(* Backward inclusion. *)
{
fix xr assume "xr ∈ a' ∪ b'"
with assms(2,3) obtain xl where "T xl xr" unfolding rel_set_def by auto
with assms ‹xr ∈ a' ∪ b'› have "xl ∈ a ∪ b"
unfolding bi_unique_def by (metis Un_iff rel_setD2)
with ‹T xl xr› have "∃xl. xl ∈ a ∪ b ∧ T xl xr" by auto
}
then have prem_rhs: "∀xr ∈ a' ∪ b'. ∃xl. xl ∈ a ∪ b ∧ T xl xr" by simp
from prem_lhs prem_rhs show ?thesis unfolding rel_set_def by auto
qed
(* Big-union analogue: K' collects the rel_set-images of the members of K;
under bi-unique T, any xr related to ⋃K must equal ⋃K'. The subset premise
guarantees every element of ⋃K has a T-image. *)
lemma bi_unique_Union_r:
fixes T :: "['a, 'b] ⇒ bool" and K
defines K': "K' ≡ {(x, y). rel_set T x y} `` K"
assumes "bi_unique T"
and "⋃K ⊆ Collect (Domainp T)"
and "rel_set T (⋃K) xr"
shows "⋃K' = xr"
proof -
(* Every x ∈ ⋃K has a T-image in ⋃K': build the image set k' of the
containing member k and show rel_set T k k'. *)
{
fix x assume "x ∈ ⋃K"
then obtain k where "x ∈ k" and "k ∈ K" by clarsimp
from assms have ex_k'_prem: "∀k ∈ K. ∀x ∈ k. ∃x'. T x x'" by auto
define k' where k': "k' = {x'. ∃x ∈ k. T x x'}"
have "rel_set T k k'"
unfolding rel_set_def Bex_def k'
using ‹k ∈ K› by (blast dest: ex_k'_prem[rule_format])
with ‹k ∈ K› have "k' ∈ K'" unfolding K' by auto
from ‹rel_set T k k'› ‹x ∈ k› obtain y where "y ∈ k' ∧ T x y"
by (auto dest: rel_setD1)
then have "∃y ∈ ⋃K'. T x y" using ‹k' ∈ K'› by auto
}
note unique_r = this
(* Conversely, every x' ∈ ⋃K' has a T-preimage in ⋃K: reconstruct the
preimage set k and use bi-uniqueness of rel_set T to place it in K. *)
{
fix x' assume "x' ∈ ⋃K'"
then obtain k' where "x' ∈ k'" and "k' ∈ K'" by clarsimp
then have ex_k_prem: "∀k' ∈ K'. ∀x ∈ k'. ∃x. T x x'"
unfolding K' by (auto dest: rel_setD2)
define k where k: "k = {x. ∃x' ∈ k'. T x x'}"
have "rel_set T k k'"
unfolding rel_set_def Bex_def k
using ‹k' ∈ K'› K' by (blast dest: rel_setD2)
from assms(2) have "bi_unique (rel_set T)" by (rule bi_unique_rel_set)
with ‹rel_set T k k'› have "∃!k. rel_set T k k'" by (auto dest: bi_uniqueDl)
with ‹rel_set T k k'› K' ‹k' ∈ K'› have "k ∈ K" by auto
from ‹rel_set T k k'› ‹x' ∈ k'› obtain y where "y ∈ k ∧ T y x'"
by (auto dest: rel_setD2)
then have "∃y ∈ ⋃K. T y x'" using ‹k ∈ K› by auto
}
(* rel_set T (⋃K) (⋃K') and bi-uniqueness of rel_set T fix xr. *)
with unique_r have "rel_set T (⋃K) (⋃K')" by (intro rel_setI)
with assms(2,4) show ?thesis by (auto dest: bi_uniqueDr bi_unique_rel_set)
qed
(* Left-hand counterpart of bi_unique_Union_r, obtained by flipping T
(Rangep T becomes the domain of the flipped relation). *)
lemma bi_unique_Union_l:
fixes T :: "['a, 'b] ⇒ bool" and K'
defines K: "K ≡ {(x, y). rel_set (λ y x. T x y) x y} `` K'"
assumes "bi_unique T"
and "⋃K' ⊆ Collect (Rangep T)"
and "rel_set T xl (⋃K')"
shows "⋃K = xl"
proof -
let ?T' = "λ y x. T x y"
from assms(2) have "bi_unique ?T'" unfolding bi_unique_def by simp
moreover from assms(3) have "⋃K' ⊆ Collect (Domainp ?T')" by blast
moreover from assms(4) have "rel_set ?T' (⋃K') xl"
unfolding rel_set_def by simp
ultimately have "⋃({(x, y). rel_set ?T' x y} `` K') = xl"
by (rule bi_unique_Union_r)
thus ?thesis using K by simp
qed
context
includes lifting_syntax
begin
text‹
The lemma \<^text>‹Domainp_applyI› was adopted from the lemma with the
identical name in the theory \<^text>‹Types_To_Sets/Group_on_With.thy›.
›
(* Applying a rel_fun-related function to a related argument yields a
   value in the domain of B. *)
lemma Domainp_applyI:
includes lifting_syntax
shows "(A ===> B) f g ⟹ A x y ⟹ Domainp B (f x)"
by (auto simp: rel_fun_def)
(* Characterizes Domainp (rel_fun A B) as an image-inclusion condition
   on the function; needs left-uniqueness of A. *)
lemma Domainp_fun:
assumes "left_unique A"
shows
"Domainp (rel_fun A B) =
(λf. f ` (Collect (Domainp A)) ⊆ (Collect (Domainp B)))"
proof-
have
"pred_fun (Domainp A) (Domainp B) =
(λf. f ` (Collect (Domainp A)) ⊆ (Collect (Domainp B)))"
by (simp add: image_subset_iff)
from Domainp_pred_fun_eq[OF ‹left_unique A›, of B, unfolded this]
show ?thesis .
qed
(* Transfer rule: bounded existential quantification over functions on
   the domains of A/B corresponds to unrestricted Ex on the abstract side. *)
lemma Bex_fun_transfer[transfer_rule]:
assumes "bi_unique A" "right_total B"
shows
"(((A ===> B) ===> (=)) ===> (=))
(Bex (Collect (λf. f ` (Collect (Domainp A)) ⊆ (Collect (Domainp B)))))
Ex"
proof-
from assms(1) have "left_unique A" by (simp add: bi_unique_alt_def)
note right_total_BA[transfer_rule] =
right_total_fun[
OF conjunct2[OF bi_unique_alt_def[THEN iffD1, OF assms(1)]] assms(2)
]
show ?thesis
unfolding Domainp_fun[OF ‹left_unique A›, symmetric]
by transfer_prover
qed
end
text‹\newpage›
end
(* Theory SML_Relations *)
section‹Relativization of the results about relations›
theory SML_Relations
imports Main
begin
subsection‹Definitions and common properties›
context
notes [[inductive_internals]]
begin
(*
  Transitive closure of a relation r relativized to a carrier set U:
  only closure steps whose endpoints all lie in U are admitted.
*)
inductive_set trancl_on :: "['a set, ('a × 'a) set] ⇒ ('a × 'a) set"
(‹on _/ (_⇧+)› [1000, 1000] 999)
for U :: "'a set" and r :: "('a × 'a) set"
where
r_into_trancl[intro, Pure.intro]:
"⟦ a ∈ U; b ∈ U; (a, b) ∈ r ⟧ ⟹ (a, b) ∈ on U r⇧+"
| trancl_into_trancl[Pure.intro]:
"
⟦ a ∈ U; b ∈ U; c ∈ U; (a, b) ∈ on U r⇧+; (b, c) ∈ r ⟧ ⟹
(a, c) ∈ on U r⇧+
"
(* Predicate counterpart generated by inductive_set. *)
abbreviation tranclp_on (‹on _/ (_⇧+⇧+)› [1000, 1000] 1000) where
"tranclp_on ≡ trancl_onp"
declare trancl_on_def[nitpick_unfold del]
lemmas tranclp_on_def = trancl_onp_def
end
(* Transitivity of r relativized to the carrier set U. *)
definition transp_on :: "['a set, ['a, 'a] ⇒ bool] ⇒ bool"
where "transp_on U = (λr. (∀x∈U. ∀y∈U. ∀z∈U. r x y ⟶ r y z ⟶ r x z))"
(* Acyclicity of r on U: no x ∈ U reaches itself via on U r⇧+. *)
definition acyclic_on :: "['a set, ('a × 'a) set] ⇒ bool"
where "acyclic_on U = (λr. (∀x∈U. (x, x) ∉ on U r⇧+))"
(* Set- and predicate-based relativized transitive closures agree. *)
lemma trancl_on_eq_tranclp_on:
"on P (λx y. (x, y) ∈ r)⇧+⇧+ x y = ((x, y) ∈ on (Collect P) r⇧+)"
unfolding trancl_on_def tranclp_on_def Set.mem_Collect_eq by simp
(* Membership in the relativized closure forces both endpoints into U. *)
lemma trancl_on_imp_U: "(x, y) ∈ on U r⇧+ ⟹ (x, y) ∈ U × U"
by (auto dest: trancl_on.cases)
lemmas tranclp_on_imp_P = trancl_on_imp_U[to_pred, simplified]
(* The relativized closure is contained in the ordinary closure. *)
lemma trancl_on_imp_trancl: "(x, y) ∈ on U r⇧+ ⟹ (x, y) ∈ r⇧+"
by (induction rule: trancl_on.induct) auto
lemmas tranclp_on_imp_tranclp = trancl_on_imp_trancl[to_pred]
(* Relativization to the universal carrier is vacuous. *)
lemma tranclp_eq_tranclp_on: "r⇧+⇧+ = on (λx. True) r⇧+⇧+"
unfolding tranclp_def tranclp_on_def by simp
lemma trancl_eq_trancl_on: "r⇧+ = on UNIV r⇧+"
unfolding trancl_def trancl_on_def by (simp add: tranclp_eq_tranclp_on)
(* Degenerate and universal carriers for transp_on / acyclic_on. *)
lemma transp_on_empty[simp]: "transp_on {} r" unfolding transp_on_def by simp
lemma transp_eq_transp_on: "transp = transp_on UNIV"
unfolding transp_def transp_on_def by simp
lemma acyclic_on_empty[simp]: "acyclic_on {} r" unfolding acyclic_on_def by simp
lemma acyclic_eq_acyclic_on: "acyclic = acyclic_on UNIV"
unfolding acyclic_def acyclic_on_def
unfolding trancl_def tranclp_def trancl_on_def tranclp_on_def
by simp
subsection‹Transfer rules I: \<^const>‹lfp› transfer›
text‹
The following context contains code from \cite{immler_re_2019}.
›
context
includes lifting_syntax
begin
(* Inf on predicates transfers componentwise. *)
lemma Inf_transfer[transfer_rule]:
"(rel_set (A ===> (=)) ===> A ===> (=)) Inf Inf"
unfolding Inf_fun_def by transfer_prover
(* (≤) on unary predicates, restricted to the domain of A. *)
lemma less_eq_pred_transfer[transfer_rule]:
assumes [transfer_rule]: "right_total A"
shows
"((A ===> (=)) ===> (A ===> (=)) ===> (=))
(λf g. ∀x∈Collect(Domainp A). f x ≤ g x) (≤)"
unfolding le_fun_def by transfer_prover
(* lfp of a unary-predicate functional transfers once the functional is
   truncated to bot outside the domain of A. *)
lemma lfp_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "right_total A"
defines "R ≡ (((A ===> (=)) ===> (A ===> (=))) ===> (A ===> (=)))"
shows "R (λf. lfp (λu x. if Domainp A x then f u x else bot)) lfp"
proof -
have "R (λf. Inf {u. ∀x∈Collect (Domainp A). f u x ≤ u x}) lfp"
unfolding R_def lfp_def by transfer_prover
thus ?thesis by (auto simp: le_fun_def lfp_def)
qed
(* Binary (curried) analogues of the three rules above. *)
lemma Inf2_transfer[transfer_rule]:
"(rel_set (T ===> T ===> (=)) ===> T ===> T ===> (=)) Inf Inf"
unfolding Inf_fun_def by transfer_prover
lemma less_eq2_pred_transfer[transfer_rule]:
assumes [transfer_rule]: "right_total T"
shows
"((T ===> T ===> (=)) ===> (T ===> T ===> (=)) ===> (=))
(λf g. ∀x∈Collect(Domainp T). ∀y∈Collect(Domainp T). f x y ≤ g x y) (≤)"
unfolding le_fun_def by transfer_prover
lemma lfp2_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "right_total A"
defines
"R ≡
(((A ===> A ===> (=)) ===> (A ===> A ===> (=))) ===> (A ===> A ===> (=)))"
shows
"R
(
λf. lfp
(
λu x y.
if Domainp A x
then if Domainp A y then (f u) x y else bot
else bot
)
)
lfp"
proof -
have
"R
(
λf.
Inf
{
u.
∀x∈Collect (Domainp A). ∀y∈Collect (Domainp A).
(f u) x y ≤ u x y
}
)
lfp"
unfolding R_def lfp_def by transfer_prover
thus ?thesis by (auto simp: le_fun_def lfp_def)
qed
end
subsection‹Transfer rules II: application-specific rules›
context
includes lifting_syntax
begin
(* transp_on on the domain of A corresponds to transp. *)
lemma transp_rt_transfer[transfer_rule]:
assumes[transfer_rule]: "right_total A"
shows
"((A ===> A ===> (=)) ===> (=)) (transp_on (Collect (Domainp A))) transp"
unfolding transp_def transp_on_def by transfer_prover
(* tranclp_on on the domain of A corresponds to tranclp; the manual
   tail identifies the lfp functional produced by transfer_step with the
   if-truncated form expected by lfp2_transfer. *)
lemma tranclp_rt_bu_transfer[transfer_rule]:
assumes[transfer_rule]: "bi_unique A" "right_total A"
shows
"((A ===> A ===> (=)) ===> (A ===> A ===> (=)))
(tranclp_on (Domainp A)) tranclp"
unfolding tranclp_on_def tranclp_def
apply transfer_prover_start
apply transfer_step+
proof
fix r
have
"(
λp x y.
(∃a b. x = a ∧ y = b ∧ Domainp A a ∧ Domainp A b ∧ r a b) ∨
(
∃a b c.
x = a ∧ y = c ∧
Domainp A a ∧ Domainp A b ∧ Domainp A c ∧
p a b ∧ r b c
)
) =
(
λp x y.
if Domainp A x
then if Domainp A y
then
(
∃a∈Collect (Domainp A). ∃b∈Collect (Domainp A).
x = a ∧ y = b ∧ r a b) ∨
(
∃a∈Collect (Domainp A).
∃b∈Collect (Domainp A).
∃c∈Collect (Domainp A).
x = a ∧ y = c ∧ p a b ∧ r b c
)
else bot
else bot
)"
(is "?lhs = ?rhs")
by (intro ext) simp
thus "lfp ?lhs = lfp ?rhs" by clarsimp
qed
(* Set-based counterpart of the previous rule. *)
lemma trancl_rt_bu_transfer[transfer_rule]:
assumes[transfer_rule]: "bi_unique A" "right_total A"
shows
"(rel_set (rel_prod A A) ===> rel_set (rel_prod A A))
(trancl_on (Collect (Domainp A))) trancl"
unfolding trancl_on_def trancl_def
apply transfer_prover_start
apply transfer_step+
by (auto simp: tranclp_on_imp_P[where U="Domainp A"])
(* acyclic_on on the domain of A corresponds to acyclic. *)
lemma acyclic_rt_bu_transfer[transfer_rule]:
assumes[transfer_rule]: "bi_unique A" "right_total A"
shows
"((rel_set (rel_prod A A)) ===> (=))
(acyclic_on (Collect (Domainp A))) acyclic"
unfolding acyclic_on_def acyclic_def by transfer_prover
end
text‹\newpage›
end
(* Theory SML_Simple_Orders *)
section‹Relativization of the results about orders›
theory SML_Simple_Orders
imports
"../../Introduction"
"../Foundations/SML_Relations"
Complex_Main
begin
subsection‹Class \<^class>‹ord››
subsubsection‹Definitions and common properties›
(*
  Locale ord_ow: order syntax (≤, <) relativized to an explicit carrier
  set U.  No axioms are assumed at this level; it only fixes the
  operations and their notation.
*)
locale ord_ow =
fixes U :: "'ao set"
and le :: "['ao, 'ao] ⇒ bool" (infix ‹≤⇩o⇩w› 50)
and ls :: "['ao, 'ao] ⇒ bool" (infix ‹<⇩o⇩w› 50)
begin
notation le (‹'(≤⇩o⇩w')›)
and le (infix ‹≤⇩o⇩w› 50)
and ls (‹'(<⇩o⇩w')›)
and ls (infix ‹<⇩o⇩w› 50)
abbreviation (input) ge (infix ‹≥⇩o⇩w› 50) where "x ≥⇩o⇩w y ≡ y ≤⇩o⇩w x"
abbreviation (input) gt (infix ‹>⇩o⇩w› 50) where "x >⇩o⇩w y ≡ y <⇩o⇩w x"
notation ge (‹'(≥⇩o⇩w')›)
and ge (infix ‹≥⇩o⇩w› 50)
and gt (‹'(>⇩o⇩w')›)
and gt (infix ‹>⇩o⇩w› 50)
(* Register (≤⇩o⇩w) and (<⇩o⇩w) as set-based terms with the tts framework. *)
tts_register_sbts ‹(≤⇩o⇩w)› | U
proof-
assume "Domainp AOA = (λx. x ∈ U)" "bi_unique AOA" "right_total AOA"
from tts_AA_eq_transfer[OF this] show ?thesis by auto
qed
tts_register_sbts ‹(<⇩o⇩w)› | U
proof-
assume "Domainp AOA = (λx. x ∈ U)" "bi_unique AOA" "right_total AOA"
from tts_AA_eq_transfer[OF this] show ?thesis by auto
qed
end
(* Two independent ord_ow structures with carriers U⇩1 and U⇩2; only
   introduces disambiguated notation for the paired operations. *)
locale ord_pair_ow = ord⇩1: ord_ow U⇩1 le⇩1 ls⇩1 + ord⇩2: ord_ow U⇩2 le⇩2 ls⇩2
for U⇩1 :: "'ao set" and le⇩1 ls⇩1 and U⇩2 :: "'bo set" and le⇩2 ls⇩2
begin
notation le⇩1 (‹'(≤⇩o⇩w⇩.⇩1')›)
and le⇩1 (infix ‹≤⇩o⇩w⇩.⇩1› 50)
and ls⇩1 (‹'(<⇩o⇩w⇩.⇩1')›)
and ls⇩1 (infix ‹<⇩o⇩w⇩.⇩1› 50)
and le⇩2 (‹'(≤⇩o⇩w⇩.⇩2')›)
and le⇩2 (infix ‹≤⇩o⇩w⇩.⇩2› 50)
and ls⇩2 (‹'(<⇩o⇩w⇩.⇩2')›)
and ls⇩2 (infix ‹<⇩o⇩w⇩.⇩2› 50)
notation ord⇩1.ge (‹'(≥⇩o⇩w⇩.⇩1')›)
and ord⇩1.ge (infix ‹≥⇩o⇩w⇩.⇩1› 50)
and ord⇩1.gt (‹'(>⇩o⇩w⇩.⇩1')›)
and ord⇩1.gt (infix ‹>⇩o⇩w⇩.⇩1› 50)
and ord⇩2.ge (‹'(≥⇩o⇩w⇩.⇩2')›)
and ord⇩2.ge (infix ‹≥⇩o⇩w⇩.⇩2› 50)
and ord⇩2.gt (‹'(>⇩o⇩w⇩.⇩2')›)
and ord⇩2.gt (infix ‹>⇩o⇩w⇩.⇩2› 50)
end
(* 'ud' (unoverload definition, CTR framework): introduce unoverloaded
   variants of the interval and min/max constants, with "with … : …"
   display syntax. *)
ud ‹ord.lessThan› (‹(with _ : ({..<_}))› [1000] 10)
ud lessThan' ‹lessThan›
ud ‹ord.atMost› (‹(with _ : ({.._}))› [1000] 10)
ud atMost' ‹atMost›
ud ‹ord.greaterThan› (‹(with _ : ({_<..}))› [1000] 10)
ud greaterThan' ‹greaterThan›
ud ‹ord.atLeast› (‹(with _ : ({_..}))› [1000] 10)
ud atLeast' ‹atLeast›
ud ‹ord.greaterThanLessThan› (‹(with _ : ({_<..<_}))› [1000, 999, 1000] 10)
ud greaterThanLessThan' ‹greaterThanLessThan›
ud ‹ord.atLeastLessThan› (‹(with _ _ : ({_..<_}))› [1000, 999, 1000, 1000] 10)
ud atLeastLessThan' ‹atLeastLessThan›
ud ‹ord.greaterThanAtMost› (‹(with _ _ : ({_<.._}))› [1000, 999, 1000, 999] 10)
ud greaterThanAtMost' ‹greaterThanAtMost›
ud ‹ord.atLeastAtMost› (‹(with _ : ({_.._}))› [1000, 1000, 1000] 10)
ud atLeastAtMost' ‹atLeastAtMost›
ud ‹ord.min› (‹(with _ : «min» _ _)› [1000, 1000, 999] 10)
ud min' ‹min›
ud ‹ord.max› (‹(with _ : «max» _ _)› [1000, 1000, 999] 10)
ud max' ‹max›
(* 'ctr relativization' (CTR framework): synthesize set-based ("_ow")
   variants of the interval constants.  One-bound intervals need only
   right-totality; two-bound intervals additionally need bi-uniqueness. *)
ctr relativization
synthesis ctr_simps
assumes [transfer_domain_rule, transfer_rule]: "Domainp A = (λx. x ∈ U)"
and [transfer_rule]: "right_total A"
trp (?'a A)
in lessThan_ow: lessThan.with_def
(‹(on _ with _ : ({..<_}))› [1000, 1000, 1000] 10)
and atMost_ow: atMost.with_def
(‹(on _ with _ : ({.._}))› [1000, 1000, 1000] 10)
and greaterThan_ow: greaterThan.with_def
(‹(on _ with _: ({_<..}))› [1000, 1000, 1000] 10)
and atLeast_ow: atLeast.with_def
(‹(on _ with _ : ({_..}))› [1000, 1000, 1000] 10)
ctr relativization
synthesis ctr_simps
assumes [transfer_domain_rule, transfer_rule]: "Domainp A = (λx. x ∈ U)"
and [transfer_rule]: "bi_unique A" "right_total A"
trp (?'a A)
in greaterThanLessThan_ow: greaterThanLessThan.with_def
(‹(on _ with _ : ({_<..<_}))› [1000, 1000, 1000, 1000] 10)
and atLeastLessThan_ow: atLeastLessThan.with_def
(‹(on _ with _ _ : ({_..<_}))› [1000, 1000, 999, 1000, 1000] 10)
and greaterThanAtMost_ow: greaterThanAtMost.with_def
(‹(on _ with _ _ : ({_<.._}))› [1000, 1000, 999, 1000, 1000] 10)
and atLeastAtMost_ow: atLeastAtMost.with_def
(‹(on _ with _ : ({_.._}))› [1000, 1000, 1000, 1000] 10)
(* min/max are parametric, so plain parametricity suffices. *)
ctr parametricity
in min_ow: min.with_def
and max_ow: max.with_def
context ord_ow
begin
(* Relativized interval and min/max abbreviations expressed via the
   "_ow" constants instantiated with this locale's carrier U and its
   order operations. *)
abbreviation lessThan :: "'ao ⇒ 'ao set" ("(1{..<⇩o⇩w_})")
where "{..<⇩o⇩w u} ≡ on U with (<⇩o⇩w) : {..<u}"
abbreviation atMost :: "'ao ⇒ 'ao set" ("(1{..⇩o⇩w_})")
where "{..⇩o⇩w u} ≡ on U with (≤⇩o⇩w) : {..u}"
abbreviation greaterThan :: "'ao ⇒ 'ao set" ("(1{_<⇩o⇩w..})")
where "{l<⇩o⇩w..} ≡ on U with (<⇩o⇩w) : {l<..}"
abbreviation atLeast :: "'ao ⇒ 'ao set" ("(1{_..⇩o⇩w})")
where "atLeast l ≡ on U with (≤⇩o⇩w) : {l..}"
abbreviation greaterThanLessThan :: "'ao ⇒ 'ao ⇒ 'ao set" ("(1{_<⇩o⇩w..<⇩o⇩w_})")
where "{l<⇩o⇩w..<⇩o⇩wu} ≡ on U with (<⇩o⇩w) : {l<..<u}"
abbreviation atLeastLessThan :: "'ao ⇒ 'ao ⇒ 'ao set" ("(1{_..<⇩o⇩w_})")
(* BUG FIX: the right-hand side previously read ‹{l<..u}›, which is the
   greaterThanAtMost pattern (cf. the abbreviation below); the
   atLeastLessThan_ow constant declared with syntax ({_..<_}) requires
   the half-open interval ‹{l..<u}›. *)
where "{l..<⇩o⇩w u} ≡ on U with (≤⇩o⇩w) (<⇩o⇩w) : {l..<u}"
abbreviation greaterThanAtMost :: "'ao ⇒ 'ao ⇒ 'ao set" ("(1{_<⇩o⇩w.._})")
where "{l<⇩o⇩w..u} ≡ on U with (≤⇩o⇩w) (<⇩o⇩w) : {l<..u}"
abbreviation atLeastAtMost :: "'ao ⇒ 'ao ⇒ 'ao set" ("(1{_..⇩o⇩w_})")
where "{l..⇩o⇩wu} ≡ on U with (≤⇩o⇩w) : {l..u}"
abbreviation min :: "'ao ⇒ 'ao ⇒ 'ao" where "min ≡ min.with (≤⇩o⇩w)"
abbreviation max :: "'ao ⇒ 'ao ⇒ 'ao" where "max ≡ max.with (≤⇩o⇩w)"
end
context ord_pair_ow
begin
(* Disambiguated interval notation for the two ord_ow components. *)
notation ord⇩1.lessThan ("(1{..<⇩o⇩w⇩.⇩1_})")
notation ord⇩1.atMost ("(1{..⇩o⇩w⇩.⇩1_})")
notation ord⇩1.greaterThan ("(1{_<⇩o⇩w⇩.⇩1..})")
notation ord⇩1.atLeast ("(1{_..⇩o⇩w⇩.⇩1})")
notation ord⇩1.greaterThanLessThan ("(1{_<⇩o⇩w⇩.⇩1..<⇩o⇩w⇩.⇩1_})")
notation ord⇩1.atLeastLessThan ("(1{_..<⇩o⇩w⇩.⇩1_})")
notation ord⇩1.greaterThanAtMost ("(1{_<⇩o⇩w⇩.⇩1.._})")
notation ord⇩1.atLeastAtMost ("(1{_..⇩o⇩w⇩.⇩1_})")
notation ord⇩2.lessThan ("(1{..<⇩o⇩w⇩.⇩2_})")
notation ord⇩2.atMost ("(1{..⇩o⇩w⇩.⇩2_})")
notation ord⇩2.greaterThan ("(1{_<⇩o⇩w⇩.⇩2..})")
notation ord⇩2.atLeast ("(1{_..⇩o⇩w⇩.⇩2})")
notation ord⇩2.greaterThanLessThan ("(1{_<⇩o⇩w⇩.⇩2..<⇩o⇩w⇩.⇩2_})")
notation ord⇩2.atLeastLessThan ("(1{_..<⇩o⇩w⇩.⇩2_})")
notation ord⇩2.greaterThanAtMost ("(1{_<⇩o⇩w⇩.⇩2.._})")
notation ord⇩2.atLeastAtMost ("(1{_..⇩o⇩w⇩.⇩2_})")
end
subsection‹Preorders›
subsubsection‹Definitions and common properties›
(*
  Relativized preorder: the usual preorder axioms, guarded by membership
  of all participating elements in the carrier U.
*)
locale preorder_ow = ord_ow U le ls
for U :: "'ao set" and le ls +
assumes less_le_not_le:
"⟦ x ∈ U; y ∈ U ⟧ ⟹ x <⇩o⇩w y ⟷ x ≤⇩o⇩w y ∧ ¬ (y ≤⇩o⇩w x)"
and order_refl[iff]: "x ∈ U ⟹ x ≤⇩o⇩w x"
and order_trans: "⟦ x ∈ U; y ∈ U; z ∈ U; x ≤⇩o⇩w y; y ≤⇩o⇩w z ⟧ ⟹ x ≤⇩o⇩w z"
(* Mixed pair: bare order syntax on the first carrier, preorder on the
   second. *)
locale ord_preorder_ow =
ord⇩1: ord_ow U⇩1 le⇩1 ls⇩1 + ord⇩2: preorder_ow U⇩2 le⇩2 ls⇩2
for U⇩1 :: "'ao set" and le⇩1 ls⇩1 and U⇩2 :: "'bo set" and le⇩2 ls⇩2
begin
sublocale ord_pair_ow .
end
(* Both components are preorders. *)
locale preorder_pair_ow =
ord⇩1: preorder_ow U⇩1 le⇩1 ls⇩1 + ord⇩2: preorder_ow U⇩2 le⇩2 ls⇩2
for U⇩1 :: "'ao set" and le⇩1 and ls⇩1 and U⇩2 :: "'bo set" and le⇩2 and ls⇩2
begin
sublocale ord_preorder_ow ..
end
(* Unoverloaded and relativized variants of bdd_above / bdd_below. *)
ud ‹preorder.bdd_above› (‹(with _ : «bdd'_above» _)› [1000, 1000] 10)
ud bdd_above' ‹bdd_above›
ud ‹preorder.bdd_below› (‹(with _ : «bdd'_below» _)› [1000, 1000] 10)
ud bdd_below' ‹bdd_below›
ctr relativization
synthesis ctr_simps
assumes [transfer_domain_rule, transfer_rule]: "Domainp A = (λx. x ∈ U)"
and [transfer_rule]: "right_total A"
trp (?'a A)
in bdd_above_ow: bdd_above.with_def
(‹(on _ with _ : «bdd'_above» _)› [1000, 1000, 1000] 10)
and bdd_below_ow: bdd_below.with_def
(‹(on _ with _ : «bdd'_below» _)› [1000, 1000, 1000] 10)
context preorder_ow
begin
(* Boundedness notions instantiated with this locale's carrier and order. *)
abbreviation bdd_above :: "'ao set ⇒ bool"
where "bdd_above ≡ bdd_above_ow U (≤⇩o⇩w)"
abbreviation bdd_below :: "'ao set ⇒ bool"
where "bdd_below ≡ bdd_below_ow U (≤⇩o⇩w)"
end
subsubsection‹Transfer rules›
context
includes lifting_syntax
begin
(* preorder_ow on the domain of A corresponds to class.preorder. *)
lemma preorder_transfer[transfer_rule]:
assumes [transfer_rule]: "right_total A"
shows
"((A ===> A ===> (=)) ===> (A ===> A ===> (=)) ===> (=))
(preorder_ow (Collect (Domainp A))) class.preorder"
unfolding preorder_ow_def class.preorder_def
apply transfer_prover_start
apply transfer_step+
by blast
end
subsubsection‹Relativization›
context preorder_ow
begin
(*
  Relativization of preorder_class facts via the tts framework: each
  tts_lemma restates a class-based fact with explicit carrier-membership
  premises, discharged by the eliminating tactic.
*)
tts_context
tts: (?'a to U)
rewriting ctr_simps
substituting preorder_ow_axioms
eliminating through auto
begin
tts_lemma less_irrefl:
assumes "x ∈ U"
shows "¬ x <⇩o⇩w x"
is preorder_class.less_irrefl.
tts_lemma bdd_below_Ioc:
assumes "a ∈ U" and "b ∈ U"
shows "bdd_below {a<⇩o⇩w..b}"
is preorder_class.bdd_below_Ioc.
tts_lemma bdd_above_Ioc:
assumes "a ∈ U" and "b ∈ U"
shows "bdd_above {a<⇩o⇩w..b}"
is preorder_class.bdd_above_Ioc.
tts_lemma bdd_above_Iic:
assumes "b ∈ U"
shows "bdd_above {..⇩o⇩wb}"
is preorder_class.bdd_above_Iic.
tts_lemma bdd_above_Iio:
assumes "b ∈ U"
shows "bdd_above {..<⇩o⇩wb}"
is preorder_class.bdd_above_Iio.
tts_lemma bdd_below_Ici:
assumes "a ∈ U"
shows "bdd_below {a..⇩o⇩w}"
is preorder_class.bdd_below_Ici.
tts_lemma bdd_below_Ioi:
assumes "a ∈ U"
shows "bdd_below {a<⇩o⇩w..}"
is preorder_class.bdd_below_Ioi.
tts_lemma bdd_above_Icc:
assumes "a ∈ U" and "b ∈ U"
shows "bdd_above {a..⇩o⇩wb}"
is preorder_class.bdd_above_Icc.
tts_lemma bdd_above_Ioo:
assumes "a ∈ U" and "b ∈ U"
shows "bdd_above {a<⇩o⇩w..<⇩o⇩wb}"
is preorder_class.bdd_above_Ioo.
tts_lemma bdd_below_Icc:
assumes "a ∈ U" and "b ∈ U"
shows "bdd_below {a..⇩o⇩wb}"
is preorder_class.bdd_below_Icc.
tts_lemma bdd_below_Ioo:
assumes "a ∈ U" and "b ∈ U"
shows "bdd_below {a<⇩o⇩w..<⇩o⇩wb}"
is preorder_class.bdd_below_Ioo.
tts_lemma bdd_above_Ico:
assumes "a ∈ U" and "b ∈ U"
shows "bdd_above (on U with (≤⇩o⇩w) (<⇩o⇩w) : {a..<b})"
is preorder_class.bdd_above_Ico.
tts_lemma bdd_below_Ico:
assumes "a ∈ U" and "b ∈ U"
shows "bdd_below (on U with (≤⇩o⇩w) (<⇩o⇩w) : {a..<b})"
is preorder_class.bdd_below_Ico.
tts_lemma Ioi_le_Ico:
assumes "a ∈ U"
shows "{a<⇩o⇩w..} ⊆ {a..⇩o⇩w}"
is preorder_class.Ioi_le_Ico.
tts_lemma eq_refl:
assumes "y ∈ U" and "x = y"
shows "x ≤⇩o⇩w y"
is preorder_class.eq_refl.
tts_lemma less_imp_le:
assumes "x ∈ U" and "y ∈ U" and "x <⇩o⇩w y"
shows "x ≤⇩o⇩w y"
is preorder_class.less_imp_le.
tts_lemma less_not_sym:
assumes "x ∈ U" and "y ∈ U" and "x <⇩o⇩w y"
shows "¬ y <⇩o⇩w x"
is preorder_class.less_not_sym.
tts_lemma less_imp_not_less:
assumes "x ∈ U" and "y ∈ U" and "x <⇩o⇩w y"
shows "(¬ y <⇩o⇩w x) = True"
is preorder_class.less_imp_not_less.
tts_lemma less_asym':
assumes "a ∈ U" and "b ∈ U" and "a <⇩o⇩w b" and "b <⇩o⇩w a"
shows P
is preorder_class.less_asym'.
tts_lemma less_imp_triv:
assumes "x ∈ U" and "y ∈ U" and "x <⇩o⇩w y"
shows "(y <⇩o⇩w x ⟶ P) = True"
is preorder_class.less_imp_triv.
tts_lemma less_trans:
assumes "x ∈ U" and "y ∈ U" and "z ∈ U" and "x <⇩o⇩w y" and "y <⇩o⇩w z"
shows "x <⇩o⇩w z"
is preorder_class.less_trans.
tts_lemma less_le_trans:
assumes "x ∈ U" and "y ∈ U" and "z ∈ U" and "x <⇩o⇩w y" and "y ≤⇩o⇩w z"
shows "x <⇩o⇩w z"
is preorder_class.less_le_trans.
tts_lemma le_less_trans:
assumes "x ∈ U" and "y ∈ U" and "z ∈ U" and "x ≤⇩o⇩w y" and "y <⇩o⇩w z"
shows "x <⇩o⇩w z"
is preorder_class.le_less_trans.
tts_lemma bdd_aboveI:
assumes "A ⊆ U" and "M ∈ U" and "⋀x. ⟦x ∈ U; x ∈ A⟧ ⟹ x ≤⇩o⇩w M"
shows "bdd_above A"
is preorder_class.bdd_aboveI.
tts_lemma bdd_belowI:
assumes "A ⊆ U" and "m ∈ U" and "⋀x. ⟦x ∈ U; x ∈ A⟧ ⟹ m ≤⇩o⇩w x"
shows "bdd_below A"
is preorder_class.bdd_belowI.
tts_lemma less_asym:
assumes "x ∈ U" and "y ∈ U" and "x <⇩o⇩w y" and "¬ P ⟹ y <⇩o⇩w x"
shows P
is preorder_class.less_asym.
tts_lemma transp_less: "transp_on U (<⇩o⇩w)"
is preorder_class.transp_less.
tts_lemma transp_le: "transp_on U (≤⇩o⇩w)"
is preorder_class.transp_le.
tts_lemma transp_gr: "transp_on U (λx y. y <⇩o⇩w x)"
is preorder_class.transp_gr.
tts_lemma transp_ge: "transp_on U (λx y. y ≤⇩o⇩w x)"
is preorder_class.transp_ge.
tts_lemma bdd_above_Int1:
assumes "A ⊆ U" and "B ⊆ U" and "bdd_above A"
shows "bdd_above (A ∩ B)"
is preorder_class.bdd_above_Int1.
tts_lemma bdd_above_Int2:
assumes "B ⊆ U" and "A ⊆ U" and "bdd_above B"
shows "bdd_above (A ∩ B)"
is preorder_class.bdd_above_Int2.
tts_lemma bdd_below_Int1:
assumes "A ⊆ U" and "B ⊆ U" and "bdd_below A"
shows "bdd_below (A ∩ B)"
is preorder_class.bdd_below_Int1.
tts_lemma bdd_below_Int2:
assumes "B ⊆ U" and "A ⊆ U" and "bdd_below B"
shows "bdd_below (A ∩ B)"
is preorder_class.bdd_below_Int2.
tts_lemma bdd_above_mono:
assumes "B ⊆ U" and "bdd_above B" and "A ⊆ B"
shows "bdd_above A"
is preorder_class.bdd_above_mono.
tts_lemma bdd_below_mono:
assumes "B ⊆ U" and "bdd_below B" and "A ⊆ B"
shows "bdd_below A"
is preorder_class.bdd_below_mono.
tts_lemma atLeastAtMost_subseteq_atLeastLessThan_iff:
assumes "a ∈ U"
and "b ∈ U"
and "c ∈ U"
and "d ∈ U"
shows "({a..⇩o⇩wb} ⊆ (on U with (≤⇩o⇩w) (<⇩o⇩w) : {c..<d})) =
(a ≤⇩o⇩w b ⟶ b <⇩o⇩w d ∧ c ≤⇩o⇩w a)"
is preorder_class.atLeastAtMost_subseteq_atLeastLessThan_iff.
tts_lemma atMost_subset_iff:
assumes "x ∈ U" and "y ∈ U"
shows "({..⇩o⇩wx} ⊆ {..⇩o⇩wy}) = (x ≤⇩o⇩w y)"
is Set_Interval.atMost_subset_iff.
tts_lemma single_Diff_lessThan:
assumes "k ∈ U"
shows "{k} - {..<⇩o⇩wk} = {k}"
is Set_Interval.single_Diff_lessThan.
tts_lemma atLeast_subset_iff:
assumes "x ∈ U" and "y ∈ U"
shows "({x..⇩o⇩w} ⊆ {y..⇩o⇩w}) = (y ≤⇩o⇩w x)"
is Set_Interval.atLeast_subset_iff.
tts_lemma atLeastatMost_psubset_iff:
assumes "a ∈ U" and "b ∈ U" and "c ∈ U" and "d ∈ U"
shows
"({a..⇩o⇩wb} ⊂ {c..⇩o⇩wd}) =
(c ≤⇩o⇩w d ∧ (¬ a ≤⇩o⇩w b ∨ c ≤⇩o⇩w a ∧ b ≤⇩o⇩w d ∧ (c <⇩o⇩w a ∨ b <⇩o⇩w d)))"
is preorder_class.atLeastatMost_psubset_iff.
tts_lemma not_empty_eq_Iic_eq_empty:
assumes "h ∈ U"
shows "{} ≠ {..⇩o⇩wh}"
is preorder_class.not_empty_eq_Iic_eq_empty.
tts_lemma atLeastatMost_subset_iff:
assumes "a ∈ U" and "b ∈ U" and "c ∈ U" and "d ∈ U"
shows "({a..⇩o⇩wb} ⊆ {c..⇩o⇩wd}) = (¬ a ≤⇩o⇩w b ∨ b ≤⇩o⇩w d ∧ c ≤⇩o⇩w a)"
is preorder_class.atLeastatMost_subset_iff.
tts_lemma Icc_subset_Ici_iff:
assumes "l ∈ U" and "h ∈ U" and "l' ∈ U"
shows "({l..⇩o⇩wh} ⊆ {l'..⇩o⇩w}) = (¬ l ≤⇩o⇩w h ∨ l' ≤⇩o⇩w l)"
is preorder_class.Icc_subset_Ici_iff.
tts_lemma Icc_subset_Iic_iff:
assumes "l ∈ U" and "h ∈ U" and "h' ∈ U"
shows "({l..⇩o⇩wh} ⊆ {..⇩o⇩wh'}) = (¬ l ≤⇩o⇩w h ∨ h ≤⇩o⇩w h')"
is preorder_class.Icc_subset_Iic_iff.
tts_lemma not_empty_eq_Ici_eq_empty:
assumes "l ∈ U"
shows "{} ≠ {l..⇩o⇩w}"
is preorder_class.not_empty_eq_Ici_eq_empty.
tts_lemma not_Ici_eq_empty:
assumes "l ∈ U"
shows "{l..⇩o⇩w} ≠ {}"
is preorder_class.not_Ici_eq_empty.
tts_lemma not_Iic_eq_empty:
assumes "h ∈ U"
shows "{..⇩o⇩wh} ≠ {}"
is preorder_class.not_Iic_eq_empty.
tts_lemma atLeastatMost_empty_iff2:
assumes "a ∈ U" and "b ∈ U"
shows "({} = {a..⇩o⇩wb}) = (¬ a ≤⇩o⇩w b)"
is preorder_class.atLeastatMost_empty_iff2.
tts_lemma atLeastLessThan_empty_iff2:
assumes "a ∈ U" and "b ∈ U"
shows "({} = (on U with (≤⇩o⇩w) (<⇩o⇩w) : {a..<b})) = (¬ a <⇩o⇩w b)"
is preorder_class.atLeastLessThan_empty_iff2.
tts_lemma greaterThanAtMost_empty_iff2:
assumes "k ∈ U" and "l ∈ U"
shows "({} = {k<⇩o⇩w..l}) = (¬ k <⇩o⇩w l)"
is preorder_class.greaterThanAtMost_empty_iff2.
tts_lemma atLeastatMost_empty_iff:
assumes "a ∈ U" and "b ∈ U"
shows "({a..⇩o⇩wb} = {}) = (¬ a ≤⇩o⇩w b)"
is preorder_class.atLeastatMost_empty_iff.
tts_lemma atLeastLessThan_empty_iff:
assumes "a ∈ U" and "b ∈ U"
shows "((on U with (≤⇩o⇩w) (<⇩o⇩w) : {a..<b}) = {}) = (¬ a <⇩o⇩w b)"
is preorder_class.atLeastLessThan_empty_iff.
tts_lemma greaterThanAtMost_empty_iff:
assumes "k ∈ U" and "l ∈ U"
shows "({k<⇩o⇩w..l} = {}) = (¬ k <⇩o⇩w l)"
is preorder_class.greaterThanAtMost_empty_iff.
end
(* Separate tts_context without the elimination tactic: boundedness of
   the empty set requires an explicit nonemptiness assumption on U. *)
tts_context
tts: (?'a to U)
substituting preorder_ow_axioms
begin
tts_lemma bdd_above_empty:
assumes "U ≠ {}"
shows "bdd_above {}"
is preorder_class.bdd_above_empty.
tts_lemma bdd_below_empty:
assumes "U ≠ {}"
shows "bdd_below {}"
is preorder_class.bdd_below_empty.
end
(* Two-type tts_context for facts about images f ` A with A drawn from a
   second carrier U⇩2. *)
tts_context
tts: (?'a to U) and (?'b to ‹U⇩2::'a set›)
rewriting ctr_simps
substituting preorder_ow_axioms
eliminating through (auto intro: bdd_above_empty bdd_below_empty)
begin
tts_lemma bdd_belowI2:
assumes "A ⊆ U⇩2"
and "m ∈ U"
and "∀x∈U⇩2. f x ∈ U"
and "⋀x. x ∈ A ⟹ m ≤⇩o⇩w f x"
shows "bdd_below (f ` A)"
given preorder_class.bdd_belowI2
by blast
tts_lemma bdd_aboveI2:
assumes "A ⊆ U⇩2"
and "∀x∈U⇩2. f x ∈ U"
and "M ∈ U"
and "⋀x. x ∈ A ⟹ f x ≤⇩o⇩w M"
shows "bdd_above (f ` A)"
given preorder_class.bdd_aboveI2
by blast
end
end
subsection‹Partial orders›
subsubsection‹Definitions and common properties›
(*
  Relativized counterpart of HOL's ordering locale: partial-order axioms
  guarded by membership in the carrier U.
*)
locale ordering_ow =
fixes U :: "'ao set"
and le :: "'ao ⇒ 'ao ⇒ bool" (infix "❙≤⇩o⇩w" 50)
and ls :: "'ao ⇒ 'ao ⇒ bool" (infix "❙<⇩o⇩w" 50)
assumes strict_iff_order: "⟦ a ∈ U; b ∈ U ⟧ ⟹ a ❙<⇩o⇩w b ⟷ a ❙≤⇩o⇩w b ∧ a ≠ b"
and refl: "a ∈ U ⟹ a ❙≤⇩o⇩w a"
and antisym: "⟦ a ∈ U; b ∈ U; a ❙≤⇩o⇩w b; b ❙≤⇩o⇩w a ⟧ ⟹ a = b"
and trans: "⟦ a ∈ U; b ∈ U; c ∈ U; a ❙≤⇩o⇩w b; b ❙≤⇩o⇩w c ⟧ ⟹ a ❙≤⇩o⇩w c"
begin
notation le (infix "❙≤⇩o⇩w" 50)
and ls (infix "❙<⇩o⇩w" 50)
end
(* Relativized partial order: preorder_ow plus antisymmetry.  Both the
   order and its dual are interpreted as ordering_ow. *)
locale order_ow = preorder_ow U le ls for U :: "'ao set" and le ls +
assumes antisym: "⟦ x ∈ U; y ∈ U; x ≤⇩o⇩w y; y ≤⇩o⇩w x ⟧ ⟹ x = y"
begin
sublocale
order: ordering_ow U ‹(≤⇩o⇩w)› ‹(<⇩o⇩w)› +
dual_order: ordering_ow U ‹(≥⇩o⇩w)› ‹(>⇩o⇩w)›
apply unfold_locales
subgoal by (force simp: less_le_not_le antisym)
subgoal by simp
subgoal by (simp add: antisym)
subgoal by (metis order_trans)
subgoal by (force simp: less_le_not_le antisym)
subgoal by (simp add: antisym)
subgoal by (metis order_trans)
done
(* Hide the bold ordering_ow notation inside order_ow. *)
no_notation le (infix "❙≤⇩o⇩w" 50)
and ls (infix "❙<⇩o⇩w" 50)
end
(* Pair locales combining ord/preorder/order structures, arranged in a
   sublocale hierarchy mirroring the single-carrier hierarchy. *)
locale ord_order_ow = ord⇩1: ord_ow U⇩1 le⇩1 ls⇩1 + ord⇩2: order_ow U⇩2 le⇩2 ls⇩2
for U⇩1 :: "'ao set" and le⇩1 ls⇩1 and U⇩2 :: "'bo set" and le⇩2 ls⇩2
sublocale ord_order_ow ⊆ ord_preorder_ow ..
locale preorder_order_ow =
ord⇩1: preorder_ow U⇩1 le⇩1 ls⇩1 + ord⇩2: order_ow U⇩2 le⇩2 ls⇩2
for U⇩1 :: "'ao set" and le⇩1 ls⇩1 and U⇩2 :: "'bo set" and le⇩2 ls⇩2
sublocale preorder_order_ow ⊆ preorder_pair_ow ..
locale order_pair_ow = ord⇩1: order_ow U⇩1 le⇩1 ls⇩1 + ord⇩2: order_ow U⇩2 le⇩2 ls⇩2
for U⇩1 :: "'ao set" and le⇩1 ls⇩1 and U⇩2 :: "'bo set" and le⇩2 ls⇩2
sublocale order_pair_ow ⊆ preorder_order_ow ..
(* Unoverloaded ("ud") constants for the monotonicity predicates. *)
ud ‹order.mono› (‹(with _ _ : «mono» _)› [1000, 999, 1000] 10)
ud mono' ‹mono›
ud ‹order.strict_mono› (‹(with _ _ : «strict'_mono» _)› [1000, 999, 1000] 10)
ud strict_mono' ‹strict_mono›
(* BUG FIX: the mixfix for order.antimono previously printed
   «strict'_mono», duplicating the strict_mono notation above; it must
   print «antimono» (cf. the antimono_ow syntax declared later). *)
ud ‹order.antimono› (‹(with _ _ : «antimono» _)› [1000, 999, 1000] 10)
ud antimono' ‹antimono›
ud ‹monoseq› (‹(with _ : «monoseq» _)› [1000, 1000] 10)
(* Relativized ("_ow") variants of the monotonicity predicates; only the
   codomain relation B needs right-totality, A is transferred freely. *)
ctr relativization
synthesis ctr_simps
assumes [transfer_domain_rule, transfer_rule]:
"Domainp (B::'c⇒'d⇒bool) = (λx. x ∈ U⇩2)"
and [transfer_rule]: "right_total B"
trp (?'b ‹A::'a⇒'b⇒bool›) and (?'a B)
in mono_ow: mono.with_def
(‹(on _ with _ _ : «mono» _)› [1000, 1000, 999, 1000] 10)
and strict_mono_ow: strict_mono.with_def
(‹(on _ with _ _ : «strict'_mono» _)› [1000, 1000, 999, 1000] 10)
and antimono_ow: antimono.with_def
(‹(on _ with _ _ : «antimono» _)› [1000, 1000, 999, 1000] 10)
and monoseq_ow: monoseq.with_def
subsubsection‹Transfer rules›
context
includes lifting_syntax
begin
(* ordering_ow on the domain of A corresponds to HOL's ordering locale. *)
lemma ordering_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "right_total A"
shows
"((A ===> A ===> (=)) ===> (A ===> A ===> (=)) ===> (=))
(ordering_ow (Collect (Domainp A))) ordering"
unfolding ordering_ow_def ordering_def
apply transfer_prover_start
apply transfer_step+
unfolding Ball_Collect[symmetric]
by (intro ext HOL.arg_cong2[where f="(∧)"]) auto
(* order_ow on the domain of A corresponds to class.order. *)
lemma order_transfer[transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "right_total A"
shows
"((A ===> A ===> (=)) ===> (A ===> A ===> (=)) ===> (=))
(order_ow (Collect (Domainp A))) class.order"
unfolding
order_ow_def class.order_def order_ow_axioms_def class.order_axioms_def
apply transfer_prover_start
apply transfer_step+
by simp
end
subsubsection‹Relativization›
context ordering_ow
begin
(* Relativization of facts from HOL's ordering locale. *)
tts_context
tts: (?'a to U)
rewriting ctr_simps
substituting ordering_ow_axioms
eliminating through simp
begin
tts_lemma irrefl:
assumes "a ∈ U"
shows "¬ a ❙<⇩o⇩w a"
is ordering.irrefl.
tts_lemma strict_implies_order:
assumes "a ∈ U" and "b ∈ U" and "a ❙<⇩o⇩w b"
shows "a ❙≤⇩o⇩w b"
is ordering.strict_implies_order.
tts_lemma strict_implies_not_eq:
assumes "a ∈ U" and "b ∈ U" and "a ❙<⇩o⇩w b"
shows "a ≠ b"
is ordering.strict_implies_not_eq.
tts_lemma order_iff_strict:
assumes "a ∈ U" and "b ∈ U"
shows "(a ❙≤⇩o⇩w b) = (a ❙<⇩o⇩w b ∨ a = b)"
is ordering.order_iff_strict.
tts_lemma asym:
assumes "a ∈ U" and "b ∈ U" and "a ❙<⇩o⇩w b" and "b ❙<⇩o⇩w a"
shows False
is ordering.asym.
tts_lemma strict_trans:
assumes "a ∈ U" and "b ∈ U" and "c ∈ U" and "a ❙<⇩o⇩w b" and "b ❙<⇩o⇩w c"
shows "a ❙<⇩o⇩w c"
is ordering.strict_trans.
tts_lemma strict_trans2:
assumes "a ∈ U" and "b ∈ U" and "c ∈ U" and "a ❙<⇩o⇩w b" and "b ❙≤⇩o⇩w c"
shows "a ❙<⇩o⇩w c"
is ordering.strict_trans2.
tts_lemma strict_trans1:
assumes "a ∈ U" and "b ∈ U" and "c ∈ U" and "a ❙≤⇩o⇩w b" and "b ❙<⇩o⇩w c"
shows "a ❙<⇩o⇩w c"
is ordering.strict_trans1.
tts_lemma not_eq_order_implies_strict:
assumes "a ∈ U" and "b ∈ U" and "a ≠ b" and "a ❙≤⇩o⇩w b"
shows "a ❙<⇩o⇩w b"
is ordering.not_eq_order_implies_strict.
end
end
context order_ow
begin
tts_context
tts: (?'a to U)
rewriting ctr_simps
substituting order_ow_axioms
eliminating through